var/home/core/zuul-output/logs/kubelet.log
Dec 01 06:50:43 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 01 06:50:43 crc restorecon[4817]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 06:50:43 crc restorecon[4817]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:43 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:50:44 crc 
restorecon[4817]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:50:44 crc 
restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 
06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:50:44 crc 
restorecon[4817]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 
06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 06:50:44 crc restorecon[4817]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 06:50:44 crc restorecon[4817]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 01 06:50:44 crc kubenswrapper[4822]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 01 06:50:44 crc kubenswrapper[4822]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 01 06:50:44 crc kubenswrapper[4822]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 01 06:50:44 crc kubenswrapper[4822]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
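[annotation] The restorecon pass above walks every file under /var/lib/kubelet and logs each path it declined to relabel because the existing context (container_file_t with per-pod MCS categories such as s0:c7,c13) counts as an admin customization. A minimal sketch for collapsing that per-file listing into per-context counts, assuming this journal text has been saved as kubelet.log next to the script; the record format in the regex is taken verbatim from the lines above:

    import re
    from collections import Counter

    # One restorecon record, exactly as formatted above: the path, the fixed
    # "not reset as customized by admin to" phrase, then the SELinux context
    # that was left in place.
    RECORD = re.compile(
        r"restorecon\[\d+\]:\s+(?P<path>/\S+)\s+not reset as customized"
        r"\s+by admin to\s+(?P<context>\S+)"
    )

    with open("kubelet.log", encoding="utf-8") as log:
        # Collapse hard line wraps so records that span lines still match.
        text = " ".join(log.read().split())

    by_context = Counter(m.group("context") for m in RECORD.finditer(text))
    for context, count in by_context.most_common():
        print(f"{count:6d}  {context}")

On this stretch of the log the output would be dominated by a single context, system_u:object_r:container_file_t:s0:c7,c13, since the two catalog pods (5225d0e4-402f-4861-b410-819f433b1803 and 1d611f23-29be-4491-8495-bee1670e935f) account for nearly every record.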
Dec 01 06:50:44 crc kubenswrapper[4822]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 01 06:50:44 crc kubenswrapper[4822]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.788996 4822 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.795536 4822 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.795720 4822 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.795827 4822 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.795927 4822 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.796024 4822 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.796142 4822 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.796242 4822 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.796341 4822 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.796437 4822 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.796544 4822 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.796691 4822 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.796793 4822 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.796890 4822 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.796987 4822 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.797082 4822 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.797178 4822 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.797274 4822 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.797381 4822 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.797479 4822 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.797657 4822 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.797765 4822 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.797863 4822 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.797958 4822 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.798054 4822 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.798168 4822 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.798267 4822 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.798362 4822 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.798468 4822 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.798607 4822 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.798713 4822 feature_gate.go:330] unrecognized feature gate: Example
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.798811 4822 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.798923 4822 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.799021 4822 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.799127 4822 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.799234 4822 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.799331 4822 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.799426 4822 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.799521 4822 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.799679 4822 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.799794 4822 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.799895 4822 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.800001 4822 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.800102 4822 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.800199 4822 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.800295 4822 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.800408 4822 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.800513 4822 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.800662 4822 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.800769 4822 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.800918 4822 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.801025 4822 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.801123 4822 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.801230 4822 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.801335 4822 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.801432 4822 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.801582 4822 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.801717 4822 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.801818 4822 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.801917 4822 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.802032 4822 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.802132 4822 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.802231 4822 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.802327 4822 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.802423 4822 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.802584 4822 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.802710 4822 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.802812 4822 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.802909 4822 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.803006 4822 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.803103 4822 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.803198 4822 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
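Everything this first pass rejects (EtcdBackendQuota, NetworkSegmentation, GatewayAPI, and the rest) is an OpenShift-level gate name unknown to the embedded Kubernetes feature-gate table; upstream's MutableFeatureGate.Set returns an error for an unknown key, so the warn-and-continue behaviour recorded here is evidently layered on top by the wrapper. A small hand-rolled sketch of that pattern, illustrative only and not the actual kubenswrapper code:

    // Illustrative sketch of warn-and-continue feature-gate parsing:
    // unknown gates are logged and skipped rather than failing startup.
    // The real kubelet uses k8s.io/component-base/featuregate underneath.
    package main

    import "log"

    type spec struct {
        enabled    bool
        prerelease string // "ALPHA", "BETA", "GA", "DEPRECATED"
    }

    // Tiny stand-in for the known-gate table.
    var known = map[string]spec{
        "KMSv1":                     {false, "DEPRECATED"},
        "CloudDualStackNodeIPs":     {true, "GA"},
        "ValidatingAdmissionPolicy": {true, "GA"},
    }

    func apply(requested map[string]bool) map[string]bool {
        resolved := map[string]bool{}
        for name, s := range known {
            resolved[name] = s.enabled // start from defaults
        }
        for name, val := range requested {
            s, ok := known[name]
            if !ok {
                log.Printf("unrecognized feature gate: %s", name) // warn, keep going
                continue
            }
            switch s.prerelease {
            case "GA", "DEPRECATED":
                log.Printf("Setting %s feature gate %s=%v. It will be removed in a future release.",
                    s.prerelease, name, val)
            }
            resolved[name] = val
        }
        return resolved
    }

    func main() {
        apply(map[string]bool{"GatewayAPI": true, "KMSv1": true, "CloudDualStackNodeIPs": true})
    }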
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.803771 4822 flags.go:64] FLAG: --address="0.0.0.0"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.803942 4822 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.804353 4822 flags.go:64] FLAG: --anonymous-auth="true"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.804476 4822 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.804615 4822 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.804724 4822 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.804828 4822 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.804930 4822 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.805031 4822 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.805151 4822 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.805255 4822 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.805354 4822 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.805452 4822 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.805575 4822 flags.go:64] FLAG: --cgroup-root=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.805768 4822 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.805874 4822 flags.go:64] FLAG: --client-ca-file=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.805983 4822 flags.go:64] FLAG: --cloud-config=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806085 4822 flags.go:64] FLAG: --cloud-provider=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806182 4822 flags.go:64] FLAG: --cluster-dns="[]"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806297 4822 flags.go:64] FLAG: --cluster-domain=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806406 4822 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806507 4822 flags.go:64] FLAG: --config-dir=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806658 4822 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806705 4822 flags.go:64] FLAG: --container-log-max-files="5"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806715 4822 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806721 4822 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806728 4822 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806735 4822 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806739 4822 flags.go:64] FLAG: --contention-profiling="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806746 4822 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806750 4822 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806757 4822 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806761 4822 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806768 4822 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806772 4822 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806777 4822 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806781 4822 flags.go:64] FLAG: --enable-load-reader="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806786 4822 flags.go:64] FLAG: --enable-server="true"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806791 4822 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806824 4822 flags.go:64] FLAG: --event-burst="100"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806829 4822 flags.go:64] FLAG: --event-qps="50"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806833 4822 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806838 4822 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806842 4822 flags.go:64] FLAG: --eviction-hard=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806854 4822 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806859 4822 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806863 4822 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806867 4822 flags.go:64] FLAG: --eviction-soft=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806872 4822 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806876 4822 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806880 4822 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806884 4822 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806889 4822 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806893 4822 flags.go:64] FLAG: --fail-swap-on="true"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806897 4822 flags.go:64] FLAG: --feature-gates=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806903 4822 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806907 4822 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806912 4822 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806918 4822 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806925 4822 flags.go:64] FLAG: --healthz-port="10248"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806930 4822 flags.go:64] FLAG: --help="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806935 4822 flags.go:64] FLAG: --hostname-override=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806940 4822 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806945 4822 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806950 4822 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806955 4822 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806960 4822 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806965 4822 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806970 4822 flags.go:64] FLAG: --image-service-endpoint=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806976 4822 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806980 4822 flags.go:64] FLAG: --kube-api-burst="100"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806985 4822 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806990 4822 flags.go:64] FLAG: --kube-api-qps="50"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806995 4822 flags.go:64] FLAG: --kube-reserved=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.806999 4822 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807004 4822 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807008 4822 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807013 4822 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807017 4822 flags.go:64] FLAG: --lock-file=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807021 4822 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807025 4822 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807029 4822 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807037 4822 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807041 4822 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807045 4822 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807049 4822 flags.go:64] FLAG: --logging-format="text"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807053 4822 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807058 4822 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807062 4822 flags.go:64] FLAG: --manifest-url=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807068 4822 flags.go:64] FLAG: --manifest-url-header=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807076 4822 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807082 4822 flags.go:64] FLAG: --max-open-files="1000000"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807088 4822 flags.go:64] FLAG: --max-pods="110"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807092 4822 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807096 4822 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807101 4822 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807105 4822 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807110 4822 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807114 4822 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807118 4822 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807133 4822 flags.go:64] FLAG: --node-status-max-images="50"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807137 4822 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807141 4822 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807145 4822 flags.go:64] FLAG: --pod-cidr=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807149 4822 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807158 4822 flags.go:64] FLAG: --pod-manifest-path=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807162 4822 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807166 4822 flags.go:64] FLAG: --pods-per-core="0"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807170 4822 flags.go:64] FLAG: --port="10250"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807175 4822 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807179 4822 flags.go:64] FLAG: --provider-id=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807184 4822 flags.go:64] FLAG: --qos-reserved=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807188 4822 flags.go:64] FLAG: --read-only-port="10255"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807192 4822 flags.go:64] FLAG: --register-node="true"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807196 4822 flags.go:64] FLAG: --register-schedulable="true"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807200 4822 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807209 4822 flags.go:64] FLAG: --registry-burst="10"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807213 4822 flags.go:64] FLAG: --registry-qps="5"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807217 4822 flags.go:64] FLAG: --reserved-cpus=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807221 4822 flags.go:64] FLAG: --reserved-memory=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807227 4822 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807232 4822 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807236 4822 flags.go:64] FLAG: --rotate-certificates="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807241 4822 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807245 4822 flags.go:64] FLAG: --runonce="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807249 4822 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807254 4822 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807258 4822 flags.go:64] FLAG: --seccomp-default="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807263 4822 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807269 4822 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807274 4822 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807280 4822 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807284 4822 flags.go:64] FLAG: --storage-driver-password="root"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807289 4822 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807293 4822 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807297 4822 flags.go:64] FLAG: --storage-driver-user="root"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807301 4822 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807305 4822 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807310 4822 flags.go:64] FLAG: --system-cgroups=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807313 4822 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807321 4822 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807325 4822 flags.go:64] FLAG: --tls-cert-file=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807329 4822 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807335 4822 flags.go:64] FLAG: --tls-min-version=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807339 4822 flags.go:64] FLAG: --tls-private-key-file=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807343 4822 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807348 4822 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807352 4822 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807357 4822 flags.go:64] FLAG: --v="2"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807363 4822 flags.go:64] FLAG: --version="false"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807369 4822 flags.go:64] FLAG: --vmodule=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807375 4822 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807379 4822 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807531 4822 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807536 4822 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807541 4822 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807561 4822 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807565 4822 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807568 4822 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807572 4822 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807576 4822 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807580 4822 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807583 4822 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807587 4822 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807590 4822 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807594 4822 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807597 4822 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807601 4822 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807604 4822 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807607 4822 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807611 4822 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807614 4822 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807617 4822 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807621 4822 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807625 4822 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807629 4822 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807632 4822 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807637 4822 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807640 4822 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807644 4822 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807647 4822 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807651 4822 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807654 4822 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807660 4822 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807665 4822 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807669 4822 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807673 4822 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807678 4822 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807682 4822 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807686 4822 feature_gate.go:330] unrecognized feature gate: Example
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807690 4822 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807694 4822 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807699 4822 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807702 4822 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807706 4822 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807709 4822 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807712 4822 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807716 4822 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807719 4822 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807723 4822 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807726 4822 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807730 4822 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807733 4822 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807737 4822 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807740 4822 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807749 4822 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807753 4822 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807757 4822 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807760 4822 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807763 4822 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807767 4822 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807771 4822 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807777 4822 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807781 4822 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807786 4822 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807789 4822 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807793 4822 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807798 4822 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807803 4822 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
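Sandwiched between the feature-gate passes, the flags.go:64 records above dump every parsed command-line flag with its effective value, defaults included, which is why unset options such as --cloud-provider="" appear too. The usual way to get exactly that shape with spf13/pflag and klog looks roughly like this (a sketch under those assumptions; the real kubelet wires this up through its own helpers):

    // Sketch: emit `FLAG: --name="value"` for every defined flag, the same
    // shape as the flags.go:64 records above. Assumes github.com/spf13/pflag
    // and k8s.io/klog/v2.
    package main

    import (
        "github.com/spf13/pflag"
        "k8s.io/klog/v2"
    )

    func main() {
        fs := pflag.NewFlagSet("kubelet", pflag.ContinueOnError)
        fs.String("node-ip", "", "IP address of the node")
        fs.Int32("max-pods", 110, "maximum number of pods")
        _ = fs.Parse([]string{"--node-ip=192.168.126.11"})

        // VisitAll walks every defined flag in lexical order, set or not,
        // so defaults show up exactly as they do in the log.
        fs.VisitAll(func(f *pflag.Flag) {
            klog.Infof("FLAG: --%s=%q", f.Name, f.Value)
        })
    }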
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807810 4822 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807815 4822 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807819 4822 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807823 4822 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.807826 4822 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.807841 4822 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.817044 4822 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.817076 4822 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817152 4822 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817161 4822 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817168 4822 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817174 4822 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817181 4822 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817187 4822 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817192 4822 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817197 4822 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817202 4822 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817207 4822 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817212 4822 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817216 4822 feature_gate.go:330] unrecognized feature gate: Example
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817221 4822 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817226 4822 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817231 4822 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817236 4822 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817241 4822 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817245 4822 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817250 4822 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817257 4822 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817263 4822 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817279 4822 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817284 4822 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817289 4822 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817294 4822 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817300 4822 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817305 4822 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817310 4822 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817317 4822 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817322 4822 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817329 4822 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817336 4822 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817342 4822 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817347 4822 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817352 4822 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817357 4822 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817362 4822 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817367 4822 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817372 4822 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817377 4822 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817382 4822 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817387 4822 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817392 4822 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817397 4822 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817402 4822 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817409 4822 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817415 4822 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817422 4822 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817429 4822 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817435 4822 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817440 4822 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817445 4822 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817450 4822 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817455 4822 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817460 4822 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817465 4822 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817470 4822 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817488 4822 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817494 4822 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817498 4822 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817505 4822 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
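Each parsing pass ends with a feature_gate.go:386 record dumping the resolved map, as seen above after the first repeat; the same map is printed again after the later passes. The result is always identical: KMSv1 and the two GA gates forced true, everything else left at its default. Downstream code then queries this resolved state instead of re-reading flags; with k8s.io/component-base/featuregate, the library these feature_gate.go records come from, that flow looks roughly like the sketch below (the KMSv1 registration is illustrative wiring, not copied from kubelet source):

    // Sketch: building and querying a resolved feature-gate map with
    // k8s.io/component-base/featuregate. Gate registration here is
    // illustrative; the real tables live in the kubelet and its wrapper.
    package main

    import (
        "fmt"

        "k8s.io/component-base/featuregate"
    )

    const KMSv1 featuregate.Feature = "KMSv1"

    func main() {
        gates := featuregate.NewFeatureGate()
        if err := gates.Add(map[featuregate.Feature]featuregate.FeatureSpec{
            KMSv1: {Default: false, PreRelease: featuregate.Deprecated},
        }); err != nil {
            panic(err)
        }
        // Same "Name=bool" form as --feature-gates; setting a deprecated
        // gate logs the warning seen in the records above.
        if err := gates.Set("KMSv1=true"); err != nil {
            panic(err) // upstream errors out on unknown gate names
        }
        fmt.Println("KMSv1 enabled:", gates.Enabled(KMSv1)) // true
    }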
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817511 4822 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817517 4822 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817522 4822 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817527 4822 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817532 4822 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817537 4822 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817542 4822 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817563 4822 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817568 4822 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817573 4822 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.817581 4822 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817767 4822 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817777 4822 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817782 4822 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817788 4822 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817793 4822 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817800 4822 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817805 4822 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817811 4822 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817816 4822 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817821 4822 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817826 4822 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817831 4822 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817836 4822 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817841 4822 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817845 4822 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817850 4822 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817855 4822 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817860 4822 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817864 4822 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817869 4822 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817876 4822 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817882 4822 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817888 4822 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817893 4822 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817898 4822 feature_gate.go:330] unrecognized feature gate: Example
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817903 4822 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817908 4822 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817912 4822 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817917 4822 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817922 4822 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817927 4822 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817932 4822 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817936 4822 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817942 4822 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817947 4822 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817952 4822 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817958 4822 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817965 4822 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817971 4822 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817976 4822 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817981 4822 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817987 4822 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817993 4822 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.817999 4822 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818004 4822 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818010 4822 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818016 4822 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818021 4822 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818026 4822 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818032 4822 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818037 4822 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818043 4822 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818049 4822 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818055 4822 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818061 4822 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818066 4822 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818072 4822 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818078 4822 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818083 4822 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818088 4822 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818093 4822 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818098 4822 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818103 4822 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818108 4822 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818113 4822 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818118 4822 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818124 4822 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818129 4822 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818134 4822 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818138 4822 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.818143 4822 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.818151 4822 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.818491 4822 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.821846 4822 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.821935 4822 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
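The rotation bookkeeping that follows explains the immediate re-issue attempt: the client certificate is valid until 2026-02-24, but the jittered rotation deadline of 2025-11-15 already lies in the past at boot time (2025-12-01), so the manager starts rotating right away, and the first CSR POST fails only because the API server at api-int.crc.testing:6443 is not reachable yet this early in startup. The combined cert/key PEM the manager loads can be inspected with nothing but the Go standard library; a sketch:

    // Sketch: read kubelet's combined client cert/key PEM and print the
    // expiry that drives the rotation deadline. Standard library only;
    // the path comes from the certificate_store.go record above.
    package main

    import (
        "crypto/tls"
        "crypto/x509"
        "fmt"
        "log"
    )

    func main() {
        const pemPath = "/var/lib/kubelet/pki/kubelet-client-current.pem"
        // The file holds both the certificate and the private key, so it
        // is passed as both arguments.
        pair, err := tls.LoadX509KeyPair(pemPath, pemPath)
        if err != nil {
            log.Fatal(err)
        }
        leaf, err := x509.ParseCertificate(pair.Certificate[0])
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println("subject:", leaf.Subject)
        fmt.Println("expires:", leaf.NotAfter) // rotation is scheduled before this
    }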
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.822477    4822 server.go:997] "Starting client certificate rotation"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.822508    4822 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.822842    4822 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-15 17:08:24.107969638 +0000 UTC
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.822972    4822 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.828282    4822 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.829812    4822 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 01 06:50:44 crc kubenswrapper[4822]: E1201 06:50:44.829834    4822 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.838454    4822 log.go:25] "Validated CRI v1 runtime API"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.852302    4822 log.go:25] "Validated CRI v1 image API"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.854171    4822 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.857293    4822 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-01-06-41-50-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.857326    4822 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}]
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.874076    4822 manager.go:217] Machine: {Timestamp:2025-12-01 06:50:44.872724737 +0000 UTC m=+0.193532463 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:531638cf-8ff2-40bb-b69e-10a6b05dc0e6 BootID:315fcd00-0c1c-414e-8eb4-b46c25a26f75 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:7c:33:fd Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:7c:33:fd Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:a0:c3:70 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:6b:01:f0 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:3e:46:ad Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:a1:8d:91 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:cc:4c:b5 Speed:-1 Mtu:1496} {Name:ens7.44 MacAddress:52:54:00:74:3f:59 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:fa:fd:fc:82:6d:b0 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:5a:7a:e4:ce:37:5f Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.874339    4822 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.874457    4822 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.874776    4822 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.874953    4822 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.874990    4822 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.875217    4822 topology_manager.go:138] "Creating topology manager with none policy"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.875229    4822 container_manager_linux.go:303] "Creating device plugin manager"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.875370    4822 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.875396    4822 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.875705    4822 state_mem.go:36] "Initialized new in-memory state store"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.876064    4822 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.876766    4822 kubelet.go:418] "Attempting to sync node with API server"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.876787    4822 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.876834    4822 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.876849    4822 kubelet.go:324] "Adding apiserver pod source"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.876893    4822 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.878306    4822 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.878671    4822 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
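Note that the rotation deadline logged above (2025-11-15) is already in the past relative to the Dec 01 log timestamps, which is why the kubelet immediately tries "Rotating certificates" and fails with connection refused. As a hedged sketch of the general idea behind such deadlines (client-go style managers jitter the rotation point inside the certificate's validity window; the exact 0.7/0.9 fractions here are an assumption for illustration, not read from this log):

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a point uniformly in roughly [70%, 90%] of the
// certificate's validity window, so a fleet of kubelets does not stampede
// the CSR API at the same instant. Fractions are assumed for illustration.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jitter := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jitter)
}

func main() {
	// Hypothetical one-year client certificate.
	notBefore := time.Date(2025, 2, 24, 5, 52, 8, 0, time.UTC)
	notAfter := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)
	fmt.Println("rotate at:", rotationDeadline(notBefore, notAfter))
}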
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.878980    4822 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.879036    4822 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused
Dec 01 06:50:44 crc kubenswrapper[4822]: E1201 06:50:44.879116    4822 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError"
Dec 01 06:50:44 crc kubenswrapper[4822]: E1201 06:50:44.879193    4822 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880016    4822 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880565    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880596    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880605    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880614    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880628    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880639    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880648    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880663    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880672    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880682    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880715    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.880725    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.881207    4822 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.881613    4822 server.go:1280] "Started kubelet"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.881977    4822 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.881977    4822 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 01 06:50:44 crc systemd[1]: Started Kubernetes Kubelet.
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.884375    4822 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.884247    4822 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 01 06:50:44 crc kubenswrapper[4822]: E1201 06:50:44.887939    4822 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.212:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d04be6ad29b18 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 06:50:44.881586968 +0000 UTC m=+0.202394664,LastTimestamp:2025-12-01 06:50:44.881586968 +0000 UTC m=+0.202394664,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.890373    4822 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.890500    4822 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.890843    4822 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.891052    4822 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.890609    4822 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 11:55:20.813791962 +0000 UTC
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.891391    4822 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 821h4m35.922407146s for next certificate rotation
Dec 01 06:50:44 crc kubenswrapper[4822]: E1201 06:50:44.890992    4822 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.891452    4822 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 01 06:50:44 crc kubenswrapper[4822]: E1201 06:50:44.891917    4822 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="200ms"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.892043    4822 server.go:460] "Adding debug handlers to kubelet server"
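Nearly every error so far is the same underlying failure: dial tcp 38.102.83.212:6443: connect: connection refused, i.e. the kubelet is up before anything is listening on the api-int endpoint. A minimal, hedged probe for reproducing that diagnosis from the node (the host:port is taken from the log above; the helper itself is illustrative and not part of the kubelet):

package main

import (
	"fmt"
	"net"
	"time"
)

// probe dials the apiserver endpoint once. "connection refused" means the
// host is reachable but nothing listens on the port yet (the state seen
// throughout this log); a timeout would instead point at routing/firewall.
func probe(addr string) {
	conn, err := net.DialTimeout("tcp", addr, 2*time.Second)
	if err != nil {
		fmt.Println("not ready:", err)
		return
	}
	conn.Close()
	fmt.Println("listening:", addr)
}

func main() {
	probe("api-int.crc.testing:6443") // endpoint from the log above
}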
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.892208    4822 factory.go:55] Registering systemd factory
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.892359    4822 factory.go:221] Registration of the systemd container factory successfully
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.892838    4822 factory.go:153] Registering CRI-O factory
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.892866    4822 factory.go:221] Registration of the crio container factory successfully
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.892944    4822 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.892968    4822 factory.go:103] Registering Raw factory
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.893015    4822 manager.go:1196] Started watching for new ooms in manager
Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.893420    4822 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused
Dec 01 06:50:44 crc kubenswrapper[4822]: E1201 06:50:44.893583    4822 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.893754    4822 manager.go:319] Starting recovery of all containers
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913144    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913448    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913475    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913498    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913513    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913531    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913546    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913594    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913625    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913667    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913692    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913707    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913725    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913745    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913764    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913778    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913792    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913811    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913826    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913844    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913859    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913873    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913894    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913911    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913931    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913946    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913971    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.913987    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.914009    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.914027    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.914044    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.914066    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.914087    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.914109    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.914124    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.914143    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.914160    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.914177    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.914196    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.914211    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917310    4822 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917380    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917401    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917447    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917469    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917483    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917578    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917599    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917614    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917631    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917680    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917694    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917711    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917737    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917755    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917777    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917794    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917808    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917827    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917841    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917853    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917870    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917882    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917899    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917911    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917927    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917944    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917958    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917974    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917986    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.917999    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918015    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918027    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918045    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918058    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918072    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918089    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918101    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918113    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918128    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918142    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918157    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918170    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918182    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918197    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918211    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918227    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918240    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918253    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918271    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918284    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918301    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918312    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918326    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918343    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918356    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918373    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918385    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918399    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918416    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918431    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918448    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918463    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918478    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918494    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918516    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918535    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918579    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918599    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918624    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918662    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918678    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918696    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918715    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918729    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918748    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918761    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918776    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918788    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918801    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918816    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918832    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918849    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918866    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918879    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918895    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918908    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918920    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918938    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918951    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918969    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918982    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.918997    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919014    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919027    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919045    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919059    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919073    4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod=""
podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919091 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919105 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919122 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919136 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919151 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919167 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919183 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919200 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919213 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919226 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919244 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" 
volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919259 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919273 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919289 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919313 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919333 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919348 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919361 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919377 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919391 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919408 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919422 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919435 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919451 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919464 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919482 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919495 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919509 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919526 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919540 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919612 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919637 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919659 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919678 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919692 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919708 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919722 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919735 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919753 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919767 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919796 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919812 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919842 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919859 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919878 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919891 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919908 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919925 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919973 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919986 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.919999 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920038 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920053 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920096 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920108 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920169 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920217 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920229 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920245 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920262 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920280 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920295 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920306 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920344 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920405 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920418 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920433 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920445 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920457 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920473 4822 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920501 4822 reconstruct.go:97] "Volume reconstruction finished" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.920510 4822 reconciler.go:26] "Reconciler: start to sync state" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.926729 4822 manager.go:324] Recovery completed Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.940607 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.945216 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.945259 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.945271 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.947124 4822 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.947576 4822 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.947591 4822 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.947609 4822 state_mem.go:36] "Initialized new in-memory state store" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.949450 4822 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.949506 4822 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.949537 4822 kubelet.go:2335] "Starting kubelet main sync loop" Dec 01 06:50:44 crc kubenswrapper[4822]: E1201 06:50:44.949668 4822 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 01 06:50:44 crc kubenswrapper[4822]: W1201 06:50:44.950382 4822 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused Dec 01 06:50:44 crc kubenswrapper[4822]: E1201 06:50:44.950456 4822 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.959387 4822 policy_none.go:49] "None policy: Start" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.959998 4822 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 01 06:50:44 crc kubenswrapper[4822]: I1201 06:50:44.960020 4822 state_mem.go:35] "Initializing new in-memory state store" Dec 01 06:50:44 crc kubenswrapper[4822]: E1201 06:50:44.991468 4822 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.010577 4822 manager.go:334] "Starting Device Plugin manager" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.010623 4822 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.010633 4822 server.go:79] "Starting device plugin registration server" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.010966 4822 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.010977 4822 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.011249 4822 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.011315 4822 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.011321 4822 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 01 06:50:45 crc kubenswrapper[4822]: E1201 06:50:45.017622 4822 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.050696 4822 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 01 06:50:45 crc kubenswrapper[4822]: 
I1201 06:50:45.050769 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.051908 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.051954 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.051968 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.052130 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.052322 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.052358 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.053069 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.053099 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.053108 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.053228 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.053421 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.053506 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.053725 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.053755 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.053766 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.054155 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.054224 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.054487 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.054667 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.054791 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.054826 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.055260 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.055335 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.055372 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.056878 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.056905 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.056924 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.056943 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.056954 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.056928 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.057059 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.057198 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.057235 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.058232 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.058262 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.058270 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.058449 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.058518 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.058650 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.058681 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.058693 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.059400 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.059432 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.059449 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: E1201 06:50:45.092566 4822 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="400ms" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.111276 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.112433 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.112493 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.112539 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.112608 4822 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 06:50:45 crc kubenswrapper[4822]: E1201 06:50:45.113254 4822 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.212:6443: connect: connection refused" node="crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.124811 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.124866 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.124959 4822 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.125001 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.125035 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.125053 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.125068 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.125082 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.125138 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.125216 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.125276 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.125307 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.125336 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.125365 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.125410 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227046 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227137 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227183 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227222 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227264 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227309 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227297 4822 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227373 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227325 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227407 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227470 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227408 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227524 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227576 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227608 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227638 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227496 4822 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227665 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227724 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227740 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227667 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227778 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227808 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227835 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227863 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227877 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") 
pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227819 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.227927 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.228053 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.228053 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.313997 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.316495 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.316618 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.316638 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.316674 4822 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 06:50:45 crc kubenswrapper[4822]: E1201 06:50:45.317447 4822 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.212:6443: connect: connection refused" node="crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.401670 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.413290 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: W1201 06:50:45.438771 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-52efcd5758cfe5e87c1877f6a1439053a5ec0fd627dbcd3b017ae77fa8ec7142 WatchSource:0}: Error finding container 52efcd5758cfe5e87c1877f6a1439053a5ec0fd627dbcd3b017ae77fa8ec7142: Status 404 returned error can't find the container with id 52efcd5758cfe5e87c1877f6a1439053a5ec0fd627dbcd3b017ae77fa8ec7142 Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.441018 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: W1201 06:50:45.443376 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-2dfb5e2f17a472b106faab6aa55a0c399c36794202c532241e43de9c3b7799cf WatchSource:0}: Error finding container 2dfb5e2f17a472b106faab6aa55a0c399c36794202c532241e43de9c3b7799cf: Status 404 returned error can't find the container with id 2dfb5e2f17a472b106faab6aa55a0c399c36794202c532241e43de9c3b7799cf Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.455773 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: W1201 06:50:45.463092 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-478ee6cec1d29658453a9d25850d32eee0563f1265233393a7e44891faa79ef2 WatchSource:0}: Error finding container 478ee6cec1d29658453a9d25850d32eee0563f1265233393a7e44891faa79ef2: Status 404 returned error can't find the container with id 478ee6cec1d29658453a9d25850d32eee0563f1265233393a7e44891faa79ef2 Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.464328 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:50:45 crc kubenswrapper[4822]: W1201 06:50:45.477525 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-160aaabd7c40dedb6489fb1ef23ded7e9a90a91249202f6465f59c52ff495520 WatchSource:0}: Error finding container 160aaabd7c40dedb6489fb1ef23ded7e9a90a91249202f6465f59c52ff495520: Status 404 returned error can't find the container with id 160aaabd7c40dedb6489fb1ef23ded7e9a90a91249202f6465f59c52ff495520 Dec 01 06:50:45 crc kubenswrapper[4822]: W1201 06:50:45.486061 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-7ddca73282d855a984bfe1d9fa29d18e0d0b8fc4b1574b00bbcc9d06daae231f WatchSource:0}: Error finding container 7ddca73282d855a984bfe1d9fa29d18e0d0b8fc4b1574b00bbcc9d06daae231f: Status 404 returned error can't find the container with id 7ddca73282d855a984bfe1d9fa29d18e0d0b8fc4b1574b00bbcc9d06daae231f Dec 01 06:50:45 crc kubenswrapper[4822]: E1201 06:50:45.494678 4822 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="800ms" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.717965 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.719462 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.719509 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.719521 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.719566 4822 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 06:50:45 crc kubenswrapper[4822]: E1201 06:50:45.720134 4822 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.212:6443: connect: connection refused" node="crc" Dec 01 06:50:45 crc kubenswrapper[4822]: W1201 06:50:45.821785 4822 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused Dec 01 06:50:45 crc kubenswrapper[4822]: E1201 06:50:45.821880 4822 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError" Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.885361 4822 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.954762 4822 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f64e8bdcf5d223872ba0450a5da013eb677cf3d29242a6371d4bfff02fcab481" exitCode=0
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.955196 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f64e8bdcf5d223872ba0450a5da013eb677cf3d29242a6371d4bfff02fcab481"}
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.955414 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2dfb5e2f17a472b106faab6aa55a0c399c36794202c532241e43de9c3b7799cf"}
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.955807 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.958545 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.958600 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.958613 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.959505 4822 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="90b993a164e48229e710d3ddb0ffb4f905efd13ce48fa9b61907fe1483d7b69c" exitCode=0
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.959589 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"90b993a164e48229e710d3ddb0ffb4f905efd13ce48fa9b61907fe1483d7b69c"}
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.959617 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"52efcd5758cfe5e87c1877f6a1439053a5ec0fd627dbcd3b017ae77fa8ec7142"}
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.959677 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.961649 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.961673 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.961688 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.963191 4822 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec" exitCode=0
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.963256 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec"}
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.963283 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7ddca73282d855a984bfe1d9fa29d18e0d0b8fc4b1574b00bbcc9d06daae231f"}
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.963356 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.964198 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.964231 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.964247 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.967100 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0"}
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.967149 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"160aaabd7c40dedb6489fb1ef23ded7e9a90a91249202f6465f59c52ff495520"}
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.968702 4822 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8" exitCode=0
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.968745 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8"}
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.968772 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"478ee6cec1d29658453a9d25850d32eee0563f1265233393a7e44891faa79ef2"}
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.968894 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.970453 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.970487 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.970497 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.973379 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.974073 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.974115 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:45 crc kubenswrapper[4822]: I1201 06:50:45.974135 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:46 crc kubenswrapper[4822]: W1201 06:50:46.293077 4822 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused
Dec 01 06:50:46 crc kubenswrapper[4822]: E1201 06:50:46.293177 4822 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError"
Dec 01 06:50:46 crc kubenswrapper[4822]: E1201 06:50:46.295240 4822 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="1.6s"
Dec 01 06:50:46 crc kubenswrapper[4822]: W1201 06:50:46.327134 4822 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused
Dec 01 06:50:46 crc kubenswrapper[4822]: E1201 06:50:46.327245 4822 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError"
Dec 01 06:50:46 crc kubenswrapper[4822]: W1201 06:50:46.367387 4822 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused
Dec 01 06:50:46 crc kubenswrapper[4822]: E1201 06:50:46.367488 4822 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.521064 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.523091 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.523160 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.523172 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.523198 4822 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.973245 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9449923af3dd5340320e4d63aa4f7a4c1398411cb97f20b117bd5d90bef5f2f8"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.973288 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"df4d34c9143b9197957269141bafa327acfff4c505d0a844387d36cda5812d91"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.973298 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"260a537446b704d0fdb98d6a6a707ffdfb8e067c0f8908efd517a182f4536fb7"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.973381 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.974720 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.974759 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.974769 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.976263 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.976304 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.976319 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.976337 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.977325 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.977360 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.977374 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.982223 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.982256 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.982270 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.982283 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.982296 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.982394 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.983288 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.983316 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.983325 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.985029 4822 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b120ffd23b9b4da9e822a929ab17cc2e15f27eecd14302e3c558088b5fb55c2e" exitCode=0
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.985089 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b120ffd23b9b4da9e822a929ab17cc2e15f27eecd14302e3c558088b5fb55c2e"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.985366 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.987018 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.987087 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.987105 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.987272 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"d9cbbbce6b94ff404ff8fff6133b99caf125a800cd26bda660ca6fd977008b99"}
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.987410 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.988253 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.988281 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:46 crc kubenswrapper[4822]: I1201 06:50:46.988294 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:47 crc kubenswrapper[4822]: I1201 06:50:47.014257 4822 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 01 06:50:47 crc kubenswrapper[4822]: I1201 06:50:47.531131 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 01 06:50:47 crc kubenswrapper[4822]: I1201 06:50:47.995823 4822 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="49e872f5599fc09f5d18d83a56b90c497d1a0f6cce7dfa4104f06820855b48fb" exitCode=0
Dec 01 06:50:47 crc kubenswrapper[4822]: I1201 06:50:47.995885 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"49e872f5599fc09f5d18d83a56b90c497d1a0f6cce7dfa4104f06820855b48fb"}
Dec 01 06:50:47 crc kubenswrapper[4822]: I1201 06:50:47.996029 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:47 crc kubenswrapper[4822]: I1201 06:50:47.996039 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:47 crc kubenswrapper[4822]: I1201 06:50:47.996061 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:48 crc kubenswrapper[4822]: I1201 06:50:48.000678 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:48 crc kubenswrapper[4822]: I1201 06:50:48.000748 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:48 crc kubenswrapper[4822]: I1201 06:50:48.000767 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:48 crc kubenswrapper[4822]: I1201 06:50:48.000765 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:48 crc kubenswrapper[4822]: I1201 06:50:48.000917 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:48 crc kubenswrapper[4822]: I1201 06:50:48.000938 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:48 crc kubenswrapper[4822]: I1201 06:50:48.001546 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:48 crc kubenswrapper[4822]: I1201 06:50:48.001657 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:48 crc kubenswrapper[4822]: I1201 06:50:48.001685 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:49 crc kubenswrapper[4822]: I1201 06:50:49.002689 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f741a586e1e94e2585d814cc0aebe51a40b7ac1460c8b14df0aad646c56a7d20"}
Dec 01 06:50:49 crc kubenswrapper[4822]: I1201 06:50:49.002756 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2133770bc9f7fb2f4df85df29d41762aa282fc27895cdf45e5b265e2857bf9b1"}
Dec 01 06:50:49 crc kubenswrapper[4822]: I1201 06:50:49.002779 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9065b298625aaa29ce98ee6aa5de960827a49bb1060415878d996f2f665379d1"}
Dec 01 06:50:50 crc kubenswrapper[4822]: I1201 06:50:50.012866 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f8aae258bf1d068a067033f688114357dcbda569d07d9e1c464335dfb77d690a"}
Dec 01 06:50:50 crc kubenswrapper[4822]: I1201 06:50:50.012945 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"30946a343dd72134786b48247d810a698ead859271e44558d8032f0eaa9c81e7"}
Dec 01 06:50:50 crc kubenswrapper[4822]: I1201 06:50:50.013047 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:50 crc kubenswrapper[4822]: I1201 06:50:50.014519 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:50 crc kubenswrapper[4822]: I1201 06:50:50.014610 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:50 crc kubenswrapper[4822]: I1201 06:50:50.014630 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:50 crc kubenswrapper[4822]: I1201 06:50:50.071746 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:50:50 crc kubenswrapper[4822]: I1201 06:50:50.072009 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:50 crc kubenswrapper[4822]: I1201 06:50:50.073866 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:50 crc kubenswrapper[4822]: I1201 06:50:50.073959 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:50 crc kubenswrapper[4822]: I1201 06:50:50.073998 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:50 crc kubenswrapper[4822]: I1201 06:50:50.172115 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.016178 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.016320 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.018249 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.018327 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.018350 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.018917 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.018985 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.019013 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.176052 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.176311 4822 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.176374 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.178378 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.178443 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.178461 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.787860 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:50:51 crc kubenswrapper[4822]: I1201 06:50:51.951923 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.019132 4822 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.019156 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.019185 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.020710 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.020760 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.020779 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.020714 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.020853 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.020868 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.136202 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.160533 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.160929 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.162736 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.162821 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.162847 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:52 crc kubenswrapper[4822]: I1201 06:50:52.787716 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Dec 01 06:50:53 crc kubenswrapper[4822]: I1201 06:50:53.021918 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:53 crc kubenswrapper[4822]: I1201 06:50:53.022013 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:53 crc kubenswrapper[4822]: I1201 06:50:53.023596 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:53 crc kubenswrapper[4822]: I1201 06:50:53.023632 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:53 crc kubenswrapper[4822]: I1201 06:50:53.023636 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:53 crc kubenswrapper[4822]: I1201 06:50:53.023717 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:53 crc kubenswrapper[4822]: I1201 06:50:53.023742 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:53 crc kubenswrapper[4822]: I1201 06:50:53.023648 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:53 crc kubenswrapper[4822]: I1201 06:50:53.071941 4822 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 01 06:50:53 crc kubenswrapper[4822]: I1201 06:50:53.072022 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 01 06:50:55 crc kubenswrapper[4822]: E1201 06:50:55.017767 4822 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 01 06:50:55 crc kubenswrapper[4822]: I1201 06:50:55.042995 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:50:55 crc kubenswrapper[4822]: I1201 06:50:55.043190 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:55 crc kubenswrapper[4822]: I1201 06:50:55.044666 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:55 crc kubenswrapper[4822]: I1201 06:50:55.044730 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:55 crc kubenswrapper[4822]: I1201 06:50:55.044752 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:55 crc kubenswrapper[4822]: I1201 06:50:55.051496 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:50:56 crc kubenswrapper[4822]: I1201 06:50:56.030536 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:50:56 crc kubenswrapper[4822]: I1201 06:50:56.031630 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:50:56 crc kubenswrapper[4822]: I1201 06:50:56.031690 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:50:56 crc kubenswrapper[4822]: I1201 06:50:56.031707 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:50:56 crc kubenswrapper[4822]: I1201 06:50:56.037200 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:50:56 crc kubenswrapper[4822]: I1201 06:50:56.230861 4822 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 01 06:50:56 crc kubenswrapper[4822]: I1201 06:50:56.231015 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 01 06:50:56 crc kubenswrapper[4822]: E1201 06:50:56.524146 4822 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 01 06:50:56 crc kubenswrapper[4822]: I1201 06:50:56.886921 4822 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 01 06:50:57 crc kubenswrapper[4822]: E1201 06:50:57.016315 4822 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 01 06:50:57 crc kubenswrapper[4822]: I1201 06:50:57.033906 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:50:57 crc kubenswrapper[4822]: I1201 06:50:57.035188 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:57 crc kubenswrapper[4822]: I1201 06:50:57.035249 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:57 crc kubenswrapper[4822]: I1201 06:50:57.035273 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:57 crc kubenswrapper[4822]: W1201 06:50:57.428842 4822 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 01 06:50:57 crc kubenswrapper[4822]: I1201 06:50:57.428941 4822 trace.go:236] Trace[582426407]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 06:50:47.427) (total time: 10001ms): Dec 01 06:50:57 crc kubenswrapper[4822]: Trace[582426407]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (06:50:57.428) Dec 01 06:50:57 crc kubenswrapper[4822]: Trace[582426407]: [10.001232585s] [10.001232585s] END Dec 01 06:50:57 crc kubenswrapper[4822]: E1201 06:50:57.428964 4822 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 01 06:50:57 crc kubenswrapper[4822]: E1201 06:50:57.896271 4822 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 01 06:50:58 crc kubenswrapper[4822]: I1201 06:50:58.124925 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 
01 06:50:58 crc kubenswrapper[4822]: I1201 06:50:58.126427 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:50:58 crc kubenswrapper[4822]: I1201 06:50:58.126475 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:50:58 crc kubenswrapper[4822]: I1201 06:50:58.126488 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:50:58 crc kubenswrapper[4822]: I1201 06:50:58.126519 4822 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 06:50:58 crc kubenswrapper[4822]: I1201 06:50:58.142381 4822 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 01 06:50:58 crc kubenswrapper[4822]: I1201 06:50:58.142437 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 01 06:50:58 crc kubenswrapper[4822]: I1201 06:50:58.148184 4822 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 01 06:50:58 crc kubenswrapper[4822]: I1201 06:50:58.148245 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 01 06:51:00 crc kubenswrapper[4822]: I1201 06:51:00.934815 4822 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 01 06:51:01 crc kubenswrapper[4822]: I1201 06:51:01.184366 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:51:01 crc kubenswrapper[4822]: I1201 06:51:01.184634 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:51:01 crc kubenswrapper[4822]: I1201 06:51:01.186451 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:01 crc kubenswrapper[4822]: I1201 06:51:01.186494 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:01 crc kubenswrapper[4822]: I1201 06:51:01.186508 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:01 crc kubenswrapper[4822]: I1201 06:51:01.191895 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:51:01 crc kubenswrapper[4822]: I1201 06:51:01.219728 4822 certificate_manager.go:356] 
kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 01 06:51:01 crc kubenswrapper[4822]: I1201 06:51:01.236898 4822 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 01 06:51:02 crc kubenswrapper[4822]: I1201 06:51:02.045896 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:51:02 crc kubenswrapper[4822]: I1201 06:51:02.047703 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:02 crc kubenswrapper[4822]: I1201 06:51:02.047867 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:02 crc kubenswrapper[4822]: I1201 06:51:02.047892 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:02 crc kubenswrapper[4822]: I1201 06:51:02.196773 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 01 06:51:02 crc kubenswrapper[4822]: I1201 06:51:02.197142 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:51:02 crc kubenswrapper[4822]: I1201 06:51:02.199222 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:02 crc kubenswrapper[4822]: I1201 06:51:02.199281 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:02 crc kubenswrapper[4822]: I1201 06:51:02.199299 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:02 crc kubenswrapper[4822]: I1201 06:51:02.219748 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.048763 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.050198 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.050253 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.050270 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.072364 4822 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.072457 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 
06:51:03.152863 4822 trace.go:236] Trace[357578427]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 06:50:48.712) (total time: 14438ms): Dec 01 06:51:03 crc kubenswrapper[4822]: Trace[357578427]: ---"Objects listed" error: 14438ms (06:51:03.151) Dec 01 06:51:03 crc kubenswrapper[4822]: Trace[357578427]: [14.438984611s] [14.438984611s] END Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.152920 4822 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 01 06:51:03 crc kubenswrapper[4822]: E1201 06:51:03.153947 4822 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.155870 4822 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.156387 4822 trace.go:236] Trace[296035635]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 06:50:48.185) (total time: 14970ms): Dec 01 06:51:03 crc kubenswrapper[4822]: Trace[296035635]: ---"Objects listed" error: 14970ms (06:51:03.156) Dec 01 06:51:03 crc kubenswrapper[4822]: Trace[296035635]: [14.970600627s] [14.970600627s] END Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.156417 4822 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.157191 4822 trace.go:236] Trace[83577268]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 06:50:48.352) (total time: 14804ms): Dec 01 06:51:03 crc kubenswrapper[4822]: Trace[83577268]: ---"Objects listed" error: 14804ms (06:51:03.156) Dec 01 06:51:03 crc kubenswrapper[4822]: Trace[83577268]: [14.80475028s] [14.80475028s] END Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.157239 4822 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.205340 4822 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": EOF" start-of-body= Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.205419 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": EOF" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.205964 4822 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.206074 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 01 06:51:03 crc kubenswrapper[4822]: 
I1201 06:51:03.206677 4822 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.206781 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.888125 4822 apiserver.go:52] "Watching apiserver" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.892315 4822 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.892676 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.893491 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.893747 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:03 crc kubenswrapper[4822]: E1201 06:51:03.893853 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.894093 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.894151 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.894426 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.894601 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:51:03 crc kubenswrapper[4822]: E1201 06:51:03.894648 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:03 crc kubenswrapper[4822]: E1201 06:51:03.894735 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.897432 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.897458 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.897505 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.897718 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.897874 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.898588 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.898766 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.900382 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.900390 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.989709 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:51:03 crc kubenswrapper[4822]: I1201 06:51:03.992448 4822 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.011986 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.027925 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.042213 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.052651 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.054188 4822 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf" exitCode=255 Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.054225 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf"} Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.054416 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062091 4822 scope.go:117] "RemoveContainer" containerID="dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062534 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062623 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062656 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062682 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062707 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062732 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062752 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 06:51:04 crc kubenswrapper[4822]: 
I1201 06:51:04.062776 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062783 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062799 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062823 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062846 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062867 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062890 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062902 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062909 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062912 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.062977 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063144 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063158 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063169 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063352 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063423 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063507 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063584 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063732 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063765 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063918 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063879 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063951 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063965 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.063992 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064012 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064034 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064102 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064124 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064145 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064167 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064187 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064215 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064238 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064258 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064279 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064302 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064323 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064345 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064344 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064363 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064369 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064424 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064439 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064460 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064478 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064494 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064509 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064523 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064539 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064567 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064584 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064605 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064617 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064635 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064626 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064647 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064665 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064677 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064755 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064793 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064813 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064830 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064902 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064929 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064939 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.064975 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065008 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065019 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065043 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065059 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065078 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065113 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065150 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065181 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065186 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065237 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065249 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065254 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065263 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065266 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065300 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065320 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065335 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065351 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065367 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065383 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065398 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065414 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065429 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065439 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065446 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065480 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065489 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065477 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065607 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065729 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065774 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065839 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066000 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066001 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066086 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066248 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066377 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066414 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066509 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066497 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066535 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.065490 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066589 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066605 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066621 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066637 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066659 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: 
\"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066674 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066677 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066690 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066733 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066762 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066789 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066818 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066841 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066865 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066890 4822 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066914 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066936 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066943 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066960 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.066988 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067014 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067040 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067065 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067090 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") 
pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067118 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067142 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067166 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067189 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067216 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067241 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067265 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067287 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067312 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067337 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: 
\"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067360 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067382 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067406 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067438 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067463 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067497 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067520 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067590 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067616 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067641 4822 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068173 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068206 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068232 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068256 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068285 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068314 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068338 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068363 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068392 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 
01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068414 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068437 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068463 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068486 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068510 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068533 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068573 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068597 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068624 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068647 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") 
" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068672 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068696 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068717 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068740 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068764 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068789 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068813 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068837 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068860 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068882 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: 
\"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068907 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.068930 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069264 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069293 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069315 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069338 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069362 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069389 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069413 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069436 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" 
(UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069460 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069484 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069508 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069530 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069570 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069597 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069620 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069656 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069682 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069707 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: 
\"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069730 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069752 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069775 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069805 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069829 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069859 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069882 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069905 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069929 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069953 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: 
\"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069974 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069996 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070020 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070046 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070070 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070094 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070119 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070142 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070166 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.072879 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.072931 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.072968 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073005 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073041 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073088 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073131 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073167 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073211 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073248 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073365 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073406 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073459 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073502 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073539 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073628 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074301 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074425 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074476 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074517 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074580 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074628 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074705 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074749 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074785 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074826 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.075916 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.075985 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.076026 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.076116 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.076157 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092597 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092657 4822 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092680 4822 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092701 4822 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092731 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092753 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092773 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092796 4822 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092824 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092848 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092867 4822 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092891 4822 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092910 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092931 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092953 4822 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092979 4822 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093000 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093019 4822 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093039 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093068 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093088 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093106 4822 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093125 4822 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093149 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093168 4822 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093187 4822 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093212 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093232 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093253 4822 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093273 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093298 4822 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093317 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093338 4822 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093358 4822 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093386 4822 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093407 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093428 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093450 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093477 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093501 4822 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093523 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093571 4822 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093595 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093617 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.090398 4822 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093793 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.082934 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.085322 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067092 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067146 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067184 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067521 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.067656 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069363 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.069421 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070345 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070356 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070364 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070571 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070578 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070667 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.070798 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.071033 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.071279 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.071570 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.071724 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.072980 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073476 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073692 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073910 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073945 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.073984 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074045 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074143 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074149 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074196 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074340 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074351 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074498 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074503 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074511 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.074695 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.075000 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.075142 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.075565 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.075566 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.075586 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.075476 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.076364 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.076377 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.076996 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.096985 4822 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.077109 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.077407 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.077763 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.077927 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.078388 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.078448 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.078679 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.078800 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.079058 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.078005 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.079432 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.080679 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.081296 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.081429 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.081765 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.081930 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.081097 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.082200 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.082355 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.082377 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.082740 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.082793 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.082845 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.082949 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.083041 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.085176 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.085795 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.085983 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.086166 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.086317 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.086495 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.086985 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.087278 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.087394 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.087595 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.087906 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.088081 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.077397 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.088539 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.088868 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.089019 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.089047 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.089172 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.089295 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.089627 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.089922 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.090011 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.090321 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.090983 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.091171 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.091257 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.091372 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.091967 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092019 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092081 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092133 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092518 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092699 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.092803 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093221 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093241 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093501 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093604 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093620 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.093657 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:51:04.593625652 +0000 UTC m=+19.914433348 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093915 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.093986 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866").
InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.094074 4822 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.095522 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.090622 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.088532 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.099525 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.100023 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.100056 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.100074 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.100093 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.100126 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.100193 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.100382 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:04.600362901 +0000 UTC m=+19.921170587 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.100599 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:04.600587207 +0000 UTC m=+19.921394893 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.102252 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.103910 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.106001 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.106298 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.106368 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.106620 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.112058 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.112080 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.112092 4822 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.112146 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:04.612130469 +0000 UTC m=+19.932938155 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.116562 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.116816 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.116410 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.117113 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.117124 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.117255 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.117315 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.117859 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.117883 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.118004 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.119025 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.119157 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.120237 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.120442 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.120456 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.120468 4822 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.120514 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:04.620499843 +0000 UTC m=+19.941307629 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.120934 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.125982 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.126028 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.126299 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.126407 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.126420 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.126721 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.126899 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.127101 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.128287 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.128882 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.130373 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.131932 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.133709 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.133943 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.140052 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.141318 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.144406 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.146191 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.150758 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.153425 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.160639 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.175465 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01
T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.185202 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.194063 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200624 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200695 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200733 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200816 4822 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200844 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200859 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200871 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200883 4822 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200898 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" 
(UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200910 4822 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200922 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200933 4822 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200944 4822 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200956 4822 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200969 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200981 4822 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.200992 4822 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201003 4822 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201017 4822 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201028 4822 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201039 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201042 4822 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201050 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201083 4822 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201099 4822 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201109 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201118 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201127 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201136 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201144 4822 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201152 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201161 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201169 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201180 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" 
DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201188 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201196 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201205 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201214 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201222 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201229 4822 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201237 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201245 4822 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201253 4822 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201262 4822 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201270 4822 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201279 4822 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201287 4822 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 
crc kubenswrapper[4822]: I1201 06:51:04.201297 4822 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201305 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201316 4822 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201323 4822 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201331 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201338 4822 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201346 4822 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201353 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201361 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201369 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201376 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201383 4822 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201407 4822 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201417 4822 
reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201426 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201434 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201441 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201449 4822 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201457 4822 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201464 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201472 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201480 4822 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201489 4822 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201496 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201504 4822 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201512 4822 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201521 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201529 4822 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201537 4822 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201560 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201569 4822 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201578 4822 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201586 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201594 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201603 4822 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201610 4822 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201617 4822 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201625 4822 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201632 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201640 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201649 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201656 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201664 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201673 4822 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201681 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201689 4822 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201697 4822 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201705 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201714 4822 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201722 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201730 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201737 4822 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201745 4822 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201753 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201761 4822 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201768 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201776 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201785 4822 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201793 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201800 4822 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201809 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201816 4822 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201824 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201831 4822 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201839 4822 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201846 4822 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201854 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201861 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201870 4822 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201878 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201885 4822 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201892 4822 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201900 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201907 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201938 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201948 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201957 4822 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201964 4822 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201973 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: 
\"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201980 4822 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201988 4822 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.201997 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202005 4822 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202013 4822 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202022 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202029 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202037 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202045 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202054 4822 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202061 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202069 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 
06:51:04.202077 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202085 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202093 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202100 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202110 4822 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202117 4822 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202125 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202132 4822 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202140 4822 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202147 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202155 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.202163 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.219611 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:51:04 crc kubenswrapper[4822]: W1201 06:51:04.229511 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-4bf83401af77742bb14b7c83cb1613c433b3593fb38829bc5b9ed8c5198676a3 WatchSource:0}: Error finding container 4bf83401af77742bb14b7c83cb1613c433b3593fb38829bc5b9ed8c5198676a3: Status 404 returned error can't find the container with id 4bf83401af77742bb14b7c83cb1613c433b3593fb38829bc5b9ed8c5198676a3 Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.236277 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.249189 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:51:04 crc kubenswrapper[4822]: W1201 06:51:04.257937 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-2d29f22482cb732421c4fb41ebbf181e3d3fd81df3e0da342a1594e62b0c9837 WatchSource:0}: Error finding container 2d29f22482cb732421c4fb41ebbf181e3d3fd81df3e0da342a1594e62b0c9837: Status 404 returned error can't find the container with id 2d29f22482cb732421c4fb41ebbf181e3d3fd81df3e0da342a1594e62b0c9837 Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.610836 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.610998 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:51:05.610976478 +0000 UTC m=+20.931784174 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.611301 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.611359 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.611440 4822 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.611454 4822 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.611500 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:05.611488762 +0000 UTC m=+20.932296448 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.611526 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:05.611516473 +0000 UTC m=+20.932324159 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.712269 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.712313 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.712425 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.712441 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.712452 4822 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.712492 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.712532 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.712568 4822 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.712501 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:05.712488425 +0000 UTC m=+21.033296111 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.712692 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:05.712648509 +0000 UTC m=+21.033456215 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.742905 4822 csr.go:261] certificate signing request csr-hszc8 is approved, waiting to be issued Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.792629 4822 csr.go:257] certificate signing request csr-hszc8 is issued Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.823456 4822 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 01 06:51:04 crc kubenswrapper[4822]: W1201 06:51:04.823799 4822 reflector.go:484] object-"openshift-network-node-identity"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 01 06:51:04 crc kubenswrapper[4822]: W1201 06:51:04.823858 4822 reflector.go:484] object-"openshift-network-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 01 06:51:04 crc kubenswrapper[4822]: W1201 06:51:04.823884 4822 reflector.go:484] object-"openshift-network-node-identity"/"network-node-identity-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-network-node-identity"/"network-node-identity-cert": Unexpected watch close - watch lasted less than a second and no items received Dec 01 06:51:04 crc kubenswrapper[4822]: W1201 06:51:04.823804 4822 reflector.go:484] object-"openshift-network-operator"/"iptables-alerter-script": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"iptables-alerter-script": Unexpected watch close - watch lasted less than a second and no items received Dec 01 06:51:04 crc kubenswrapper[4822]: W1201 06:51:04.823893 4822 reflector.go:484] object-"openshift-network-node-identity"/"ovnkube-identity-cm": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"ovnkube-identity-cm": Unexpected watch close - watch lasted less than a second and no items received Dec 01 06:51:04 crc kubenswrapper[4822]: W1201 06:51:04.823862 4822 reflector.go:484] object-"openshift-network-operator"/"openshift-service-ca.crt": watch of 
*v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 01 06:51:04 crc kubenswrapper[4822]: W1201 06:51:04.823861 4822 reflector.go:484] object-"openshift-network-operator"/"metrics-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-network-operator"/"metrics-tls": Unexpected watch close - watch lasted less than a second and no items received Dec 01 06:51:04 crc kubenswrapper[4822]: W1201 06:51:04.823815 4822 reflector.go:484] object-"openshift-network-node-identity"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 01 06:51:04 crc kubenswrapper[4822]: W1201 06:51:04.824418 4822 reflector.go:484] object-"openshift-network-node-identity"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.957927 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:04 crc kubenswrapper[4822]: E1201 06:51:04.958091 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.960561 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.961074 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.961866 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.962426 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.963025 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.963499 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.964060 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.964581 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.965155 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.967339 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.967891 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.968721 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.969254 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.969786 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.970306 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.970831 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.971371 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.971754 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.972289 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.972864 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.972945 4822 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:04Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.973324 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.973861 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.974278 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.974937 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.975385 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.977464 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.978297 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.978889 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.979636 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.980144 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.980689 4822 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.980793 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.982096 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.982623 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.983004 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.984056 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.984725 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.985316 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.987889 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.988567 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.989038 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.989622 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.990186 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.990761 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.990868 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:04Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.991199 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.991730 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.992255 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.992967 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.993407 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.993886 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.994327 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.994814 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.995341 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 01 06:51:04 crc kubenswrapper[4822]: I1201 06:51:04.995790 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.003990 4822 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.021435 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.037717 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.056297 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.058134 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.059650 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d"} Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.059722 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.060839 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"895c3fe4f29396a725b7243d0366c56087b9dde27d50b1be589b6a4cdcc487f0"} Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.062298 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008"} Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.062339 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082"} Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.062351 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"2d29f22482cb732421c4fb41ebbf181e3d3fd81df3e0da342a1594e62b0c9837"} Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.063438 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c"} Dec 01 06:51:05 crc kubenswrapper[4822]: 
I1201 06:51:05.063471 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4bf83401af77742bb14b7c83cb1613c433b3593fb38829bc5b9ed8c5198676a3"} Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.071340 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\
",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.090530 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.100593 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.113939 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.128865 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.147672 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.179287 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.193477 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.197526 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-8v9xh"] Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.197825 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-8v9xh" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.199263 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.199427 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.199893 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.216245 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.216842 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6e244f28-ce62-4ba3-bc7f-292a39725ae6-hosts-file\") pod \"node-resolver-8v9xh\" (UID: \"6e244f28-ce62-4ba3-bc7f-292a39725ae6\") " pod="openshift-dns/node-resolver-8v9xh" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.216893 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8rm5\" (UniqueName: \"kubernetes.io/projected/6e244f28-ce62-4ba3-bc7f-292a39725ae6-kube-api-access-r8rm5\") pod \"node-resolver-8v9xh\" (UID: \"6e244f28-ce62-4ba3-bc7f-292a39725ae6\") " pod="openshift-dns/node-resolver-8v9xh" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.238717 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.261824 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is 
after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.276327 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.294412 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.318149 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6e244f28-ce62-4ba3-bc7f-292a39725ae6-hosts-file\") pod \"node-resolver-8v9xh\" (UID: \"6e244f28-ce62-4ba3-bc7f-292a39725ae6\") " pod="openshift-dns/node-resolver-8v9xh" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.318196 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8rm5\" (UniqueName: \"kubernetes.io/projected/6e244f28-ce62-4ba3-bc7f-292a39725ae6-kube-api-access-r8rm5\") pod \"node-resolver-8v9xh\" (UID: \"6e244f28-ce62-4ba3-bc7f-292a39725ae6\") " pod="openshift-dns/node-resolver-8v9xh" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.318322 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6e244f28-ce62-4ba3-bc7f-292a39725ae6-hosts-file\") pod \"node-resolver-8v9xh\" (UID: \"6e244f28-ce62-4ba3-bc7f-292a39725ae6\") " pod="openshift-dns/node-resolver-8v9xh" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.323881 4822 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.343333 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8rm5\" (UniqueName: \"kubernetes.io/projected/6e244f28-ce62-4ba3-bc7f-292a39725ae6-kube-api-access-r8rm5\") pod \"node-resolver-8v9xh\" (UID: \"6e244f28-ce62-4ba3-bc7f-292a39725ae6\") " pod="openshift-dns/node-resolver-8v9xh" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.362809 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.388656 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.513325 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-8v9xh" Dec 01 06:51:05 crc kubenswrapper[4822]: W1201 06:51:05.526332 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e244f28_ce62_4ba3_bc7f_292a39725ae6.slice/crio-af3026819f11d66d685ebfc47451137ce9d15a3df790affaae30695fd1198c3d WatchSource:0}: Error finding container af3026819f11d66d685ebfc47451137ce9d15a3df790affaae30695fd1198c3d: Status 404 returned error can't find the container with id af3026819f11d66d685ebfc47451137ce9d15a3df790affaae30695fd1198c3d Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.618276 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-kb9ml"] Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.618804 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.621166 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.621223 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.621242 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.621303 4822 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.621357 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:51:07.621313899 +0000 UTC m=+22.942121585 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.621378 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:07.62136947 +0000 UTC m=+22.942177166 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.621414 4822 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.621505 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:07.621482574 +0000 UTC m=+22.942290330 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.623247 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-75mdq"] Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.623905 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.627041 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.627093 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.627729 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.628022 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.628036 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-b6tpr"] Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.628142 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.628302 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.628321 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.628425 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.628616 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-2cz64"] Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.628781 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.628817 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.628986 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.628994 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.638244 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.638798 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.639117 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.639194 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.639340 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.639419 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.639616 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.640000 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.644222 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.658098 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.680388 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.692374 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.703762 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.715434 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722529 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-var-lib-openvswitch\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722577 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ebc3e4ad-c394-405c-ac35-c77290463348-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722596 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-etc-openvswitch\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722613 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-multus-cni-dir\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722635 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-multus-conf-dir\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722653 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-node-log\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: 
I1201 06:51:05.722670 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-run-ovn-kubernetes\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722689 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-var-lib-cni-multus\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722707 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-ovn\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722731 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-run-k8s-cni-cncf-io\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722812 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-systemd\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722858 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8284d339-ff12-453a-be42-4540e44252ee-ovn-node-metrics-cert\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722878 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlfwt\" (UniqueName: \"kubernetes.io/projected/8284d339-ff12-453a-be42-4540e44252ee-kube-api-access-vlfwt\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722895 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-kubelet\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722921 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-slash\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722943 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-cnibin\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722958 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-env-overrides\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722975 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-ovnkube-script-lib\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.722990 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ebc3e4ad-c394-405c-ac35-c77290463348-system-cni-dir\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723010 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-systemd-units\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723028 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-openvswitch\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723049 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723100 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a6c6a838-3829-4058-aa59-1302d07e4507-mcd-auth-proxy-config\") pod \"machine-config-daemon-2cz64\" (UID: \"a6c6a838-3829-4058-aa59-1302d07e4507\") " pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723131 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" 
(UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-var-lib-kubelet\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723149 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-run-multus-certs\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723167 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9m8d\" (UniqueName: \"kubernetes.io/projected/a6c6a838-3829-4058-aa59-1302d07e4507-kube-api-access-x9m8d\") pod \"machine-config-daemon-2cz64\" (UID: \"a6c6a838-3829-4058-aa59-1302d07e4507\") " pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723193 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-cni-bin\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723210 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-system-cni-dir\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723225 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ebc3e4ad-c394-405c-ac35-c77290463348-cnibin\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723261 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ebc3e4ad-c394-405c-ac35-c77290463348-os-release\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723323 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-run-netns\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723350 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-log-socket\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723372 4822 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-cni-netd\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723400 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq4w5\" (UniqueName: \"kubernetes.io/projected/34b58185-4742-4187-9243-860433c413d8-kube-api-access-jq4w5\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723442 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723468 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b96s\" (UniqueName: \"kubernetes.io/projected/ebc3e4ad-c394-405c-ac35-c77290463348-kube-api-access-5b96s\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723497 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-var-lib-cni-bin\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723527 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-etc-kubernetes\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.723568 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723575 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-ovnkube-config\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723610 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/34b58185-4742-4187-9243-860433c413d8-cni-binary-copy\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.723586 4822 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.723691 4822 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723632 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a6c6a838-3829-4058-aa59-1302d07e4507-rootfs\") pod \"machine-config-daemon-2cz64\" (UID: \"a6c6a838-3829-4058-aa59-1302d07e4507\") " pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723733 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a6c6a838-3829-4058-aa59-1302d07e4507-proxy-tls\") pod \"machine-config-daemon-2cz64\" (UID: \"a6c6a838-3829-4058-aa59-1302d07e4507\") " pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723754 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/ebc3e4ad-c394-405c-ac35-c77290463348-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723770 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-run-netns\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.723812 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:07.723776711 +0000 UTC m=+23.044584397 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723863 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-hostroot\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723913 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/34b58185-4742-4187-9243-860433c413d8-multus-daemon-config\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723955 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.723981 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-os-release\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.724016 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-multus-socket-dir-parent\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.724049 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ebc3e4ad-c394-405c-ac35-c77290463348-cni-binary-copy\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.724102 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.724131 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.724146 4822 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.724199 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:07.724180702 +0000 UTC m=+23.044988388 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.728025 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.737782 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.745690 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is 
after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.761373 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.773134 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.794279 4822 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate 
expiration is 2026-12-01 06:46:04 +0000 UTC, rotation deadline is 2026-08-17 23:15:31.975807856 +0000 UTC Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.794356 4822 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6232h24m26.181453413s for next certificate rotation Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.798015 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.820765 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.824999 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-var-lib-openvswitch\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825028 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ebc3e4ad-c394-405c-ac35-c77290463348-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825045 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-multus-conf-dir\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825063 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-etc-openvswitch\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825077 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-multus-cni-dir\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825137 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-var-lib-openvswitch\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825157 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-multus-conf-dir\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825192 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-ovn\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825174 4822 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-ovn\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825274 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-node-log\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825279 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-etc-openvswitch\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825322 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-run-ovn-kubernetes\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825349 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-var-lib-cni-multus\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825366 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-run-ovn-kubernetes\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825379 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-run-k8s-cni-cncf-io\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825404 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-systemd\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825414 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-multus-cni-dir\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825433 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/8284d339-ff12-453a-be42-4540e44252ee-ovn-node-metrics-cert\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825436 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-run-k8s-cni-cncf-io\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825451 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ebc3e4ad-c394-405c-ac35-c77290463348-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825440 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-var-lib-cni-multus\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825490 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-systemd\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825459 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlfwt\" (UniqueName: \"kubernetes.io/projected/8284d339-ff12-453a-be42-4540e44252ee-kube-api-access-vlfwt\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825537 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-cnibin\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825578 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-kubelet\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825603 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-slash\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825621 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-cnibin\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " 
pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825629 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-systemd-units\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825694 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-slash\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825654 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-systemd-units\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825719 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-openvswitch\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825700 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-openvswitch\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825739 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-kubelet\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825895 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825950 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825948 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-env-overrides\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 
06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826003 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-ovnkube-script-lib\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.825377 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-node-log\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826028 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ebc3e4ad-c394-405c-ac35-c77290463348-system-cni-dir\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826060 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a6c6a838-3829-4058-aa59-1302d07e4507-mcd-auth-proxy-config\") pod \"machine-config-daemon-2cz64\" (UID: \"a6c6a838-3829-4058-aa59-1302d07e4507\") " pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826099 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-cni-bin\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826127 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-system-cni-dir\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826149 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-var-lib-kubelet\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826153 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-cni-bin\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826171 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-run-multus-certs\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826122 4822 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ebc3e4ad-c394-405c-ac35-c77290463348-system-cni-dir\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826196 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9m8d\" (UniqueName: \"kubernetes.io/projected/a6c6a838-3829-4058-aa59-1302d07e4507-kube-api-access-x9m8d\") pod \"machine-config-daemon-2cz64\" (UID: \"a6c6a838-3829-4058-aa59-1302d07e4507\") " pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826475 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-run-netns\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826217 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-system-cni-dir\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826244 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-run-multus-certs\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826229 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-var-lib-kubelet\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826570 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-run-netns\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826585 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-log-socket\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826614 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-cni-netd\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826636 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-log-socket\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826643 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ebc3e4ad-c394-405c-ac35-c77290463348-cnibin\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826668 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ebc3e4ad-c394-405c-ac35-c77290463348-os-release\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826678 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-cni-netd\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826695 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b96s\" (UniqueName: \"kubernetes.io/projected/ebc3e4ad-c394-405c-ac35-c77290463348-kube-api-access-5b96s\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826719 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ebc3e4ad-c394-405c-ac35-c77290463348-cnibin\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826720 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-var-lib-cni-bin\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826752 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-env-overrides\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826758 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-etc-kubernetes\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826769 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-var-lib-cni-bin\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826795 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-etc-kubernetes\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826807 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq4w5\" (UniqueName: \"kubernetes.io/projected/34b58185-4742-4187-9243-860433c413d8-kube-api-access-jq4w5\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826839 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-ovnkube-script-lib\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826871 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/34b58185-4742-4187-9243-860433c413d8-cni-binary-copy\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826894 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ebc3e4ad-c394-405c-ac35-c77290463348-os-release\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826904 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a6c6a838-3829-4058-aa59-1302d07e4507-rootfs\") pod \"machine-config-daemon-2cz64\" (UID: \"a6c6a838-3829-4058-aa59-1302d07e4507\") " pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826925 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a6c6a838-3829-4058-aa59-1302d07e4507-mcd-auth-proxy-config\") pod \"machine-config-daemon-2cz64\" (UID: \"a6c6a838-3829-4058-aa59-1302d07e4507\") " pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826940 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-ovnkube-config\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826981 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-run-netns\") pod \"multus-b6tpr\" (UID: 
\"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.826930 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a6c6a838-3829-4058-aa59-1302d07e4507-rootfs\") pod \"machine-config-daemon-2cz64\" (UID: \"a6c6a838-3829-4058-aa59-1302d07e4507\") " pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827006 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-hostroot\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827032 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/34b58185-4742-4187-9243-860433c413d8-multus-daemon-config\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827041 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-host-run-netns\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827057 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a6c6a838-3829-4058-aa59-1302d07e4507-proxy-tls\") pod \"machine-config-daemon-2cz64\" (UID: \"a6c6a838-3829-4058-aa59-1302d07e4507\") " pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827059 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-hostroot\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827082 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/ebc3e4ad-c394-405c-ac35-c77290463348-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827105 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-os-release\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827140 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-multus-socket-dir-parent\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827173 4822 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ebc3e4ad-c394-405c-ac35-c77290463348-cni-binary-copy\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827210 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-os-release\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827284 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/34b58185-4742-4187-9243-860433c413d8-multus-socket-dir-parent\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827888 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/34b58185-4742-4187-9243-860433c413d8-multus-daemon-config\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.827897 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/34b58185-4742-4187-9243-860433c413d8-cni-binary-copy\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.828051 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ebc3e4ad-c394-405c-ac35-c77290463348-cni-binary-copy\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.828053 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/ebc3e4ad-c394-405c-ac35-c77290463348-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.828080 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-ovnkube-config\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.830990 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8284d339-ff12-453a-be42-4540e44252ee-ovn-node-metrics-cert\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.835219 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/a6c6a838-3829-4058-aa59-1302d07e4507-proxy-tls\") pod \"machine-config-daemon-2cz64\" (UID: \"a6c6a838-3829-4058-aa59-1302d07e4507\") " pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.838106 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.845022 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9m8d\" (UniqueName: \"kubernetes.io/projected/a6c6a838-3829-4058-aa59-1302d07e4507-kube-api-access-x9m8d\") pod \"machine-config-daemon-2cz64\" (UID: \"a6c6a838-3829-4058-aa59-1302d07e4507\") " pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.845156 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.847527 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jq4w5\" (UniqueName: \"kubernetes.io/projected/34b58185-4742-4187-9243-860433c413d8-kube-api-access-jq4w5\") pod \"multus-b6tpr\" (UID: \"34b58185-4742-4187-9243-860433c413d8\") " pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.853902 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.856916 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlfwt\" (UniqueName: \"kubernetes.io/projected/8284d339-ff12-453a-be42-4540e44252ee-kube-api-access-vlfwt\") pod \"ovnkube-node-75mdq\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.862299 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b96s\" (UniqueName: \"kubernetes.io/projected/ebc3e4ad-c394-405c-ac35-c77290463348-kube-api-access-5b96s\") pod \"multus-additional-cni-plugins-kb9ml\" (UID: \"ebc3e4ad-c394-405c-ac35-c77290463348\") " pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.870537 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.882793 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is 
after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.895786 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.905121 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.913845 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.929235 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.943142 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.945721 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.949702 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.949726 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.949802 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:05 crc kubenswrapper[4822]: E1201 06:51:05.949899 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.955033 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.961681 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.967398 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:5
1:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.974901 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-b6tpr" Dec 01 06:51:05 crc kubenswrapper[4822]: W1201 06:51:05.978500 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8284d339_ff12_453a_be42_4540e44252ee.slice/crio-0d88d1549bb52a904a435414e0330cdd92fde7214b0f3ef3b0470295cabbfbf6 WatchSource:0}: Error finding container 0d88d1549bb52a904a435414e0330cdd92fde7214b0f3ef3b0470295cabbfbf6: Status 404 returned error can't find the container with id 0d88d1549bb52a904a435414e0330cdd92fde7214b0f3ef3b0470295cabbfbf6 Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.983561 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.985277 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 01 06:51:05 crc kubenswrapper[4822]: I1201 06:51:05.995094 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:51:06 crc kubenswrapper[4822]: W1201 06:51:06.021840 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6c6a838_3829_4058_aa59_1302d07e4507.slice/crio-cbabef20b8035d6dce0b932b8bb995528ab7ac6ada04e28d92a1bf69f124a6bd WatchSource:0}: Error finding container cbabef20b8035d6dce0b932b8bb995528ab7ac6ada04e28d92a1bf69f124a6bd: Status 404 returned error can't find the container with id cbabef20b8035d6dce0b932b8bb995528ab7ac6ada04e28d92a1bf69f124a6bd Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.069995 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-b6tpr" event={"ID":"34b58185-4742-4187-9243-860433c413d8","Type":"ContainerStarted","Data":"041393dcadc67a7e1963107f081f1214c5a338ce31ddf932c2503ad34730e953"} Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.073119 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"0d88d1549bb52a904a435414e0330cdd92fde7214b0f3ef3b0470295cabbfbf6"} Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.076178 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" event={"ID":"ebc3e4ad-c394-405c-ac35-c77290463348","Type":"ContainerStarted","Data":"9f5a8f5dfe6f87615b818d09a7ab3ef9fed39da99ef363fb3d125767382541d0"} Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.080299 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-8v9xh" event={"ID":"6e244f28-ce62-4ba3-bc7f-292a39725ae6","Type":"ContainerStarted","Data":"dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7"} Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.080352 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-8v9xh" 
event={"ID":"6e244f28-ce62-4ba3-bc7f-292a39725ae6","Type":"ContainerStarted","Data":"af3026819f11d66d685ebfc47451137ce9d15a3df790affaae30695fd1198c3d"} Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.082923 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"cbabef20b8035d6dce0b932b8bb995528ab7ac6ada04e28d92a1bf69f124a6bd"} Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.088054 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.094194 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.111280 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.112861 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.120766 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.126230 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.139463 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.152698 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.173123 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.185910 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.201164 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.220836 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.241064 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.243303 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.256398 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.278057 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.354280 4822 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.356138 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.356185 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.356203 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.356297 4822 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.375509 4822 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.375797 4822 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.379042 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.379090 4822 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.379100 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.379118 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.379127 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:06Z","lastTransitionTime":"2025-12-01T06:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:06 crc kubenswrapper[4822]: E1201 06:51:06.423395 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.427062 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.427229 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.427244 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.427263 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.427285 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:06Z","lastTransitionTime":"2025-12-01T06:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:06 crc kubenswrapper[4822]: E1201 06:51:06.471173 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.480260 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.480319 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.480330 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.480349 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.480362 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:06Z","lastTransitionTime":"2025-12-01T06:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:06 crc kubenswrapper[4822]: E1201 06:51:06.499673 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.510477 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.510564 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.510583 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.510604 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.510623 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:06Z","lastTransitionTime":"2025-12-01T06:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:06 crc kubenswrapper[4822]: E1201 06:51:06.531832 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[...],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.536003 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.536120 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.536194 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.536275 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.536342 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:06Z","lastTransitionTime":"2025-12-01T06:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:06 crc kubenswrapper[4822]: E1201 06:51:06.555451 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[...],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:06Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:06 crc kubenswrapper[4822]: E1201 06:51:06.555589 4822 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.557343 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.557379 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.557388 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.557404 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.557415 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:06Z","lastTransitionTime":"2025-12-01T06:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.659482 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.659522 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.659530 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.659561 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.659574 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:06Z","lastTransitionTime":"2025-12-01T06:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.761768 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.761818 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.761832 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.761849 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.761860 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:06Z","lastTransitionTime":"2025-12-01T06:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.864589 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.865023 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.865048 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.865063 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.865072 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:06Z","lastTransitionTime":"2025-12-01T06:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.950719 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:06 crc kubenswrapper[4822]: E1201 06:51:06.950866 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.968024 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.968080 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.968097 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.968121 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:06 crc kubenswrapper[4822]: I1201 06:51:06.968139 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:06Z","lastTransitionTime":"2025-12-01T06:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.070572 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.070799 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.070879 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.070946 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.071003 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:07Z","lastTransitionTime":"2025-12-01T06:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.086127 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560" exitCode=0 Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.086212 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.089073 4822 generic.go:334] "Generic (PLEG): container finished" podID="ebc3e4ad-c394-405c-ac35-c77290463348" containerID="f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b" exitCode=0 Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.089151 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" event={"ID":"ebc3e4ad-c394-405c-ac35-c77290463348","Type":"ContainerDied","Data":"f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.090526 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.092996 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.093040 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.095692 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-b6tpr" 
event={"ID":"34b58185-4742-4187-9243-860433c413d8","Type":"ContainerStarted","Data":"671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.105242 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.123138 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.145772 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.164388 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.179382 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.179440 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.179461 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.179484 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.179500 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:07Z","lastTransitionTime":"2025-12-01T06:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.188077 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.217069 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.238502 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.253500 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.265878 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.282418 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.283365 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.283398 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.283409 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.283433 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.283447 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:07Z","lastTransitionTime":"2025-12-01T06:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.295355 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.326519 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",
\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync 
for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.350596 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.373766 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.385745 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.385784 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.385793 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.385811 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.385821 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:07Z","lastTransitionTime":"2025-12-01T06:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.392118 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.403709 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.418024 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.437351 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.457186 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.469456 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.483252 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.494040 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.494120 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.494141 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.494168 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.494181 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:07Z","lastTransitionTime":"2025-12-01T06:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.497728 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.523774 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"
tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"
readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.545768 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:07Z 
is after 2025-08-24T17:21:41Z" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.596507 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.596569 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.596579 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.596597 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.596612 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:07Z","lastTransitionTime":"2025-12-01T06:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.648893 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.649013 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.649066 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.649164 4822 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.649189 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:51:11.64914371 +0000 UTC m=+26.969951396 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.649206 4822 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.649239 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:11.649227762 +0000 UTC m=+26.970035438 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.649308 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:11.649280083 +0000 UTC m=+26.970087779 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.698716 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.698758 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.698769 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.698789 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.698801 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:07Z","lastTransitionTime":"2025-12-01T06:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.750751 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.750830 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.751027 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.751068 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.751078 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.751087 4822 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.751108 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.751131 4822 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.751185 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:11.75115588 +0000 UTC m=+27.071963576 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.751222 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:11.751196791 +0000 UTC m=+27.072004517 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.802929 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.803464 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.803485 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.803513 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.803533 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:07Z","lastTransitionTime":"2025-12-01T06:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.905813 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.905871 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.905883 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.905902 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.905915 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:07Z","lastTransitionTime":"2025-12-01T06:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.950528 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:51:07 crc kubenswrapper[4822]: I1201 06:51:07.950580 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.950750 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:51:07 crc kubenswrapper[4822]: E1201 06:51:07.951136 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.008171 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.008232 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.008250 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.008274 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.008294 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:08Z","lastTransitionTime":"2025-12-01T06:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.101911 4822 generic.go:334] "Generic (PLEG): container finished" podID="ebc3e4ad-c394-405c-ac35-c77290463348" containerID="c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec" exitCode=0
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.101997 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" event={"ID":"ebc3e4ad-c394-405c-ac35-c77290463348","Type":"ContainerDied","Data":"c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec"}
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.109057 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"}
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.109133 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"}
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.109151 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"}
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.109165 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"}
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.109182 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"}
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.109196 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"}
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.111288 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.111329 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.111342 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.111359 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.111373 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:08Z","lastTransitionTime":"2025-12-01T06:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.128616 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.145460 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.160341 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.177887 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.215087 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z 
is after 2025-08-24T17:21:41Z"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.215408 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.215449 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.215461 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.215481 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.215493 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:08Z","lastTransitionTime":"2025-12-01T06:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.233900 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.258466 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.274529 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.289031 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.307758 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.315537 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-cv8h4"]
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.316008 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-cv8h4"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.320749 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.320751 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.321046 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.321299 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.323733 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.323836 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.323878 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.323895 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.323921 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.323939 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:08Z","lastTransitionTime":"2025-12-01T06:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.350198 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.357938 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e28e62c9-6cb1-4d0f-a448-d7c249c20bb5-host\") pod \"node-ca-cv8h4\" (UID: \"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\") " pod="openshift-image-registry/node-ca-cv8h4" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.358004 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgmqm\" (UniqueName: \"kubernetes.io/projected/e28e62c9-6cb1-4d0f-a448-d7c249c20bb5-kube-api-access-wgmqm\") pod \"node-ca-cv8h4\" (UID: \"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\") " pod="openshift-image-registry/node-ca-cv8h4" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.358052 4822 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e28e62c9-6cb1-4d0f-a448-d7c249c20bb5-serviceca\") pod \"node-ca-cv8h4\" (UID: \"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\") " pod="openshift-image-registry/node-ca-cv8h4" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.369658 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.387446 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.412242 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.427534 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.427596 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.427610 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.427629 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.427643 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:08Z","lastTransitionTime":"2025-12-01T06:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.456981 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.459268 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e28e62c9-6cb1-4d0f-a448-d7c249c20bb5-serviceca\") pod \"node-ca-cv8h4\" (UID: \"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\") " pod="openshift-image-registry/node-ca-cv8h4" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.459329 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e28e62c9-6cb1-4d0f-a448-d7c249c20bb5-host\") pod \"node-ca-cv8h4\" (UID: \"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\") " pod="openshift-image-registry/node-ca-cv8h4" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.459358 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgmqm\" (UniqueName: \"kubernetes.io/projected/e28e62c9-6cb1-4d0f-a448-d7c249c20bb5-kube-api-access-wgmqm\") pod \"node-ca-cv8h4\" (UID: \"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\") " pod="openshift-image-registry/node-ca-cv8h4" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.460382 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e28e62c9-6cb1-4d0f-a448-d7c249c20bb5-serviceca\") pod \"node-ca-cv8h4\" (UID: \"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\") " pod="openshift-image-registry/node-ca-cv8h4" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.460431 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e28e62c9-6cb1-4d0f-a448-d7c249c20bb5-host\") pod \"node-ca-cv8h4\" (UID: \"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\") " pod="openshift-image-registry/node-ca-cv8h4" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.484417 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgmqm\" (UniqueName: \"kubernetes.io/projected/e28e62c9-6cb1-4d0f-a448-d7c249c20bb5-kube-api-access-wgmqm\") pod \"node-ca-cv8h4\" (UID: \"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\") " pod="openshift-image-registry/node-ca-cv8h4" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.490109 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.503021 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.514259 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.525999 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.529670 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.529711 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.529722 4822 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.529736 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.529744 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:08Z","lastTransitionTime":"2025-12-01T06:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.537807 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.551837 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.563948 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.573930 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.582584 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.632655 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.632696 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.632708 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.632728 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.632741 4822 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:08Z","lastTransitionTime":"2025-12-01T06:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.639152 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-cv8h4" Dec 01 06:51:08 crc kubenswrapper[4822]: W1201 06:51:08.650898 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode28e62c9_6cb1_4d0f_a448_d7c249c20bb5.slice/crio-58582179861e9e37bd91300918db2b0bcd3780a3e761c6ab0e9b030b8d6e95f6 WatchSource:0}: Error finding container 58582179861e9e37bd91300918db2b0bcd3780a3e761c6ab0e9b030b8d6e95f6: Status 404 returned error can't find the container with id 58582179861e9e37bd91300918db2b0bcd3780a3e761c6ab0e9b030b8d6e95f6 Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.735035 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.735473 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.735485 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.735502 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.735514 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:08Z","lastTransitionTime":"2025-12-01T06:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.838414 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.838483 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.838496 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.838520 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.838536 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:08Z","lastTransitionTime":"2025-12-01T06:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.941378 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.941433 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.941442 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.941462 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.941475 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:08Z","lastTransitionTime":"2025-12-01T06:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:08 crc kubenswrapper[4822]: I1201 06:51:08.950330 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:08 crc kubenswrapper[4822]: E1201 06:51:08.950517 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.044504 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.044569 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.044583 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.044604 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.044619 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:09Z","lastTransitionTime":"2025-12-01T06:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.120542 4822 generic.go:334] "Generic (PLEG): container finished" podID="ebc3e4ad-c394-405c-ac35-c77290463348" containerID="dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6" exitCode=0 Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.120610 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" event={"ID":"ebc3e4ad-c394-405c-ac35-c77290463348","Type":"ContainerDied","Data":"dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6"} Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.123786 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-cv8h4" event={"ID":"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5","Type":"ContainerStarted","Data":"4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d"} Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.123862 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-cv8h4" event={"ID":"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5","Type":"ContainerStarted","Data":"58582179861e9e37bd91300918db2b0bcd3780a3e761c6ab0e9b030b8d6e95f6"} Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.147349 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is 
after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.149033 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.149102 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.149122 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.149154 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.149175 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:09Z","lastTransitionTime":"2025-12-01T06:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.167728 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1a
e34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.181386 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.200105 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.220320 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.233065 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.252364 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.252426 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.252445 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.252475 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.252497 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:09Z","lastTransitionTime":"2025-12-01T06:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.266088 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.292818 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z 
is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.315042 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.331060 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.346218 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.354988 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.355034 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.355051 4822 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.355067 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.355080 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:09Z","lastTransitionTime":"2025-12-01T06:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.362588 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.376516 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.395793 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\
":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary
-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.417326 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller 
ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"}
,{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\
\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.430710 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.454583 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.457394 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.457469 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.457492 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.457525 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.457576 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:09Z","lastTransitionTime":"2025-12-01T06:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.467414 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.481319 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.494676 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.517893 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.543016 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.562171 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.562458 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.562610 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.562749 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.562867 4822 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:09Z","lastTransitionTime":"2025-12-01T06:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.567838 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.585901 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.600763 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.614894 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:09Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.665968 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.666018 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.666030 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.666047 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.666058 4822 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:09Z","lastTransitionTime":"2025-12-01T06:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.769627 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.769695 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.769718 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.769751 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.769775 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:09Z","lastTransitionTime":"2025-12-01T06:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.872969 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.873043 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.873061 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.873090 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.873109 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:09Z","lastTransitionTime":"2025-12-01T06:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.949777 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.949937 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:09 crc kubenswrapper[4822]: E1201 06:51:09.949961 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:09 crc kubenswrapper[4822]: E1201 06:51:09.950222 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.975451 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.975501 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.975514 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.975534 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:09 crc kubenswrapper[4822]: I1201 06:51:09.975589 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:09Z","lastTransitionTime":"2025-12-01T06:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.077933 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.078942 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.079009 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.079037 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.079075 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.079104 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:10Z","lastTransitionTime":"2025-12-01T06:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.081938 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.087517 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.100649 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.121156 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.132601 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"} Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.136852 4822 generic.go:334] "Generic (PLEG): container finished" podID="ebc3e4ad-c394-405c-ac35-c77290463348" containerID="176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6" exitCode=0 Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.136914 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" event={"ID":"ebc3e4ad-c394-405c-ac35-c77290463348","Type":"ContainerDied","Data":"176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6"} Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.139842 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.167739 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.182588 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.182651 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.182674 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.182695 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.182711 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:10Z","lastTransitionTime":"2025-12-01T06:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.204126 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe
045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.228749 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.250975 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.270340 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.285144 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.285186 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.285196 4822 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.285213 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.285225 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:10Z","lastTransitionTime":"2025-12-01T06:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.290366 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.309460 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.326884 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.343343 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.356530 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.372616 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.390280 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.390340 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.390365 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.390388 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.390400 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:10Z","lastTransitionTime":"2025-12-01T06:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.391419 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.414799 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.474395 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.493426 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.493472 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.493481 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.493499 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.493509 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:10Z","lastTransitionTime":"2025-12-01T06:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.494924 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.514351 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.534522 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.553271 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.566238 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.586814 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.596672 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.596726 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.596739 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.596762 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.596775 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:10Z","lastTransitionTime":"2025-12-01T06:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.608975 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.620820 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.649080 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.668488 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:10Z 
is after 2025-08-24T17:21:41Z" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.699645 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.700488 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.700611 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.700702 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.700794 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:10Z","lastTransitionTime":"2025-12-01T06:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.804160 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.804240 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.804260 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.804289 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.804308 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:10Z","lastTransitionTime":"2025-12-01T06:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.907164 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.907211 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.907223 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.907240 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.907253 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:10Z","lastTransitionTime":"2025-12-01T06:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:10 crc kubenswrapper[4822]: I1201 06:51:10.949839 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:10 crc kubenswrapper[4822]: E1201 06:51:10.950138 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.011004 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.011819 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.011940 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.012058 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.012177 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:11Z","lastTransitionTime":"2025-12-01T06:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.114783 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.114826 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.114843 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.114862 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.114876 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:11Z","lastTransitionTime":"2025-12-01T06:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.142433 4822 generic.go:334] "Generic (PLEG): container finished" podID="ebc3e4ad-c394-405c-ac35-c77290463348" containerID="06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2" exitCode=0 Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.142505 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" event={"ID":"ebc3e4ad-c394-405c-ac35-c77290463348","Type":"ContainerDied","Data":"06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2"} Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.160023 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.175694 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.186227 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.203741 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\
",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.217827 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.217869 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.217879 
4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.217894 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.217905 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:11Z","lastTransitionTime":"2025-12-01T06:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.224399 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z 
is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.237734 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.251447 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.263781 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.276225 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.289310 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.303776 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.321008 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.321065 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.321108 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.321133 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.321150 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:11Z","lastTransitionTime":"2025-12-01T06:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.321703 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.335526 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.349750 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:11Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.427521 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.427912 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.428018 4822 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.428148 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.428258 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:11Z","lastTransitionTime":"2025-12-01T06:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.531250 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.531316 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.531339 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.531369 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.531390 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:11Z","lastTransitionTime":"2025-12-01T06:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.634050 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.634099 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.634111 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.634129 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.634140 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:11Z","lastTransitionTime":"2025-12-01T06:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.695509 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.695786 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.695819 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:51:19.69577245 +0000 UTC m=+35.016580176 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.695952 4822 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.695998 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.696055 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:19.696024827 +0000 UTC m=+35.016832533 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.696230 4822 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.696327 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:19.696305195 +0000 UTC m=+35.017112881 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.737163 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.737237 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.737257 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.737285 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.737302 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:11Z","lastTransitionTime":"2025-12-01T06:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.796947 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.797018 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.797205 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.797221 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.797272 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.797287 4822 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.797233 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.797329 4822 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.797350 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:19.797329518 +0000 UTC m=+35.118137314 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.797410 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:19.797383159 +0000 UTC m=+35.118190885 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.840905 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.840955 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.840970 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.840986 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.840999 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:11Z","lastTransitionTime":"2025-12-01T06:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.944416 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.944465 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.944476 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.944494 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.944506 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:11Z","lastTransitionTime":"2025-12-01T06:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.949945 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:11 crc kubenswrapper[4822]: I1201 06:51:11.949995 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.950073 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:11 crc kubenswrapper[4822]: E1201 06:51:11.950170 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.046480 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.046525 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.046537 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.046576 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.046590 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:12Z","lastTransitionTime":"2025-12-01T06:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.148428 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.148994 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.149025 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.149057 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.149079 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:12Z","lastTransitionTime":"2025-12-01T06:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.151911 4822 generic.go:334] "Generic (PLEG): container finished" podID="ebc3e4ad-c394-405c-ac35-c77290463348" containerID="fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0" exitCode=0 Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.151962 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" event={"ID":"ebc3e4ad-c394-405c-ac35-c77290463348","Type":"ContainerDied","Data":"fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0"} Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.169792 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.183252 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.193008 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.211907 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.224530 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.252070 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.257020 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.257049 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.257058 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.257073 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.257083 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:12Z","lastTransitionTime":"2025-12-01T06:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.269383 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.286661 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics 
northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"
host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.298899 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.312232 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.324862 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.337054 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.348270 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.359524 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.359789 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.359802 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.359818 
4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.359830 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:12Z","lastTransitionTime":"2025-12-01T06:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.360467 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:12Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.462538 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.462626 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.462643 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.462664 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.462681 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:12Z","lastTransitionTime":"2025-12-01T06:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.565441 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.565497 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.565513 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.565533 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.565567 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:12Z","lastTransitionTime":"2025-12-01T06:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.669061 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.669130 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.669150 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.669176 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.669195 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:12Z","lastTransitionTime":"2025-12-01T06:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.772430 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.772495 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.772512 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.772536 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.772576 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:12Z","lastTransitionTime":"2025-12-01T06:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.875778 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.875895 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.875919 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.875948 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.875970 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:12Z","lastTransitionTime":"2025-12-01T06:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.950304 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:12 crc kubenswrapper[4822]: E1201 06:51:12.950509 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.978689 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.978752 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.978768 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.978788 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:12 crc kubenswrapper[4822]: I1201 06:51:12.978804 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:12Z","lastTransitionTime":"2025-12-01T06:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.081432 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.081471 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.081482 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.081498 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.081509 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:13Z","lastTransitionTime":"2025-12-01T06:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.161944 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"b0fbc1332c2432e577fc6dc454bb42cfb697ad5b4762c3146830eb31dada5abe"} Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.162521 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.162650 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.171083 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" event={"ID":"ebc3e4ad-c394-405c-ac35-c77290463348","Type":"ContainerStarted","Data":"3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9"} Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.184658 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.184721 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.184737 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.184762 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.184780 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:13Z","lastTransitionTime":"2025-12-01T06:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.186727 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.197684 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.198612 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.215762 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.231540 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.252831 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d
8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.278788 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log
-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0fbc1332c2432e577fc6dc454bb42cfb697ad5b4762c3146830eb31dada5abe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\
\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.288379 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.288452 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.288477 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.288513 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.288543 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:13Z","lastTransitionTime":"2025-12-01T06:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.301299 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.322405 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.350638 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.385131 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.391779 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.391839 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.391852 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.391872 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.391886 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:13Z","lastTransitionTime":"2025-12-01T06:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.399121 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.417788 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.433658 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.446947 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.457373 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.470439 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.485803 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.494044 4822 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.494082 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.494094 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.494115 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.494129 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:13Z","lastTransitionTime":"2025-12-01T06:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.497724 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.509477 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.523699 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.535058 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.551447 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.577421 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0fbc1332c2432e577fc6dc454bb42cfb697ad5b4762c3146830eb31dada5abe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.593259 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.597802 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.597852 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.597871 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.597898 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.597918 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:13Z","lastTransitionTime":"2025-12-01T06:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.618751 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.636136 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.649147 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.660599 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.672462 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.700302 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.700346 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.700362 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.700377 
4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.700386 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:13Z","lastTransitionTime":"2025-12-01T06:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.803066 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.803107 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.803133 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.803146 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.803157 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:13Z","lastTransitionTime":"2025-12-01T06:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.906457 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.906540 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.906598 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.906628 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.906649 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:13Z","lastTransitionTime":"2025-12-01T06:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.950118 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:13 crc kubenswrapper[4822]: I1201 06:51:13.950149 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:13 crc kubenswrapper[4822]: E1201 06:51:13.950284 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:13 crc kubenswrapper[4822]: E1201 06:51:13.950407 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.009913 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.009967 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.010005 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.010032 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.010054 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:14Z","lastTransitionTime":"2025-12-01T06:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.112908 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.112971 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.112983 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.113002 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.113014 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:14Z","lastTransitionTime":"2025-12-01T06:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.175011 4822 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.215630 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.215706 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.215716 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.215735 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.215747 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:14Z","lastTransitionTime":"2025-12-01T06:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.318792 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.318847 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.318865 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.318890 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.318908 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:14Z","lastTransitionTime":"2025-12-01T06:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.421192 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.421238 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.421250 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.421267 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.421283 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:14Z","lastTransitionTime":"2025-12-01T06:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.524437 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.524473 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.524483 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.524501 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.524511 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:14Z","lastTransitionTime":"2025-12-01T06:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.630501 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.630570 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.630589 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.630621 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.630640 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:14Z","lastTransitionTime":"2025-12-01T06:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.737177 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.737233 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.737247 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.737267 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.737278 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:14Z","lastTransitionTime":"2025-12-01T06:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.840745 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.840811 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.840827 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.840852 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.840868 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:14Z","lastTransitionTime":"2025-12-01T06:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.944268 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.944359 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.944380 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.944412 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.944438 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:14Z","lastTransitionTime":"2025-12-01T06:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.950915 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:14 crc kubenswrapper[4822]: E1201 06:51:14.951123 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.973830 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:14Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:14 crc kubenswrapper[4822]: I1201 06:51:14.992198 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:14Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.016922 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.040241 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.051227 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.051286 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.051307 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.051333 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.051354 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:15Z","lastTransitionTime":"2025-12-01T06:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.065348 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.081344 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.106032 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.140985 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0fbc1332c2432e577fc6dc454bb42cfb697ad5b4762c3146830eb31dada5abe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.155799 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.155874 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.155892 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.155918 4822 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.155939 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:15Z","lastTransitionTime":"2025-12-01T06:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.162908 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.180119 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/0.log" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.182689 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.183514 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="b0fbc1332c2432e577fc6dc454bb42cfb697ad5b4762c3146830eb31dada5abe" exitCode=1 Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.183578 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"b0fbc1332c2432e577fc6dc454bb42cfb697ad5b4762c3146830eb31dada5abe"} Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.184706 4822 scope.go:117] "RemoveContainer" containerID="b0fbc1332c2432e577fc6dc454bb42cfb697ad5b4762c3146830eb31dada5abe" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.199181 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when 
the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.214787 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-
overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.229267 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.243696 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.258792 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.258869 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.258884 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.258906 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.258929 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:15Z","lastTransitionTime":"2025-12-01T06:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.266998 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.283897 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.301489 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.320615 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.335233 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.352147 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.361262 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.361289 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.361300 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.361316 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.361328 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:15Z","lastTransitionTime":"2025-12-01T06:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.370101 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.384265 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.398719 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.413471 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.433483 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.451756 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.464482 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.464520 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.464532 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.464569 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.464587 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:15Z","lastTransitionTime":"2025-12-01T06:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.471124 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.489381 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0fbc1332c2432e577fc6dc454bb42cfb697ad5b4762c3146830eb31dada5abe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fbc1332c2432e577fc6dc454bb42cfb697ad5b4762c3146830eb31dada5abe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:15Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 06:51:14.802509 6119 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 06:51:14.802776 6119 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 06:51:14.803250 6119 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 06:51:14.803588 6119 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:14.803659 6119 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:14.803717 6119 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:14.803723 6119 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:51:14.803789 6119 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:14.803898 6119 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:14.803906 6119 factory.go:656] Stopping watch factory\\\\nI1201 06:51:14.803944 6119 ovnkube.go:599] Stopped ovnkube\\\\nI1201 06:51:14.803982 6119 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f
36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:15Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.567333 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.567369 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.567378 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.567391 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.567400 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:15Z","lastTransitionTime":"2025-12-01T06:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.669458 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.669501 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.669514 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.669531 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.669542 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:15Z","lastTransitionTime":"2025-12-01T06:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.747401 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.772217 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.772243 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.772252 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.772264 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.772274 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:15Z","lastTransitionTime":"2025-12-01T06:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.874859 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.874896 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.874904 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.874917 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.874928 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:15Z","lastTransitionTime":"2025-12-01T06:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.950186 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.950225 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:15 crc kubenswrapper[4822]: E1201 06:51:15.950397 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:15 crc kubenswrapper[4822]: E1201 06:51:15.950524 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.977060 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.977126 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.977147 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.977174 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:15 crc kubenswrapper[4822]: I1201 06:51:15.977194 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:15Z","lastTransitionTime":"2025-12-01T06:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.079598 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.079650 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.079663 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.079681 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.079694 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.182763 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.182833 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.182852 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.182879 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.182898 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.189860 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/0.log" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.194051 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44"} Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.194820 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.215464 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.238333 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.260831 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.276425 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.286060 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.286120 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.286132 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.286153 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.286165 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.292482 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.312695 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
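The payload each of these failed updates carries is a strategic merge patch: the $setElementOrder/conditions directive pins the ordering of the conditions list, while the entries themselves are merged into the existing list by their "type" key, so fields not mentioned in the patch are preserved. A minimal sketch of building the same shape (shape only, not applied anywhere), assuming nothing beyond the standard library:

// smpatch.go - builds a strategic-merge-style status patch like the ones
// the status manager is sending in these entries.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	patch := map[string]any{
		"status": map[string]any{
			// ordering directive: keep conditions in this type order
			"$setElementOrder/conditions": []map[string]string{
				{"type": "PodReadyToStartContainers"},
				{"type": "Initialized"},
				{"type": "Ready"},
				{"type": "ContainersReady"},
				{"type": "PodScheduled"},
			},
			// merged into the live list by the "type" key
			"conditions": []map[string]any{
				{"type": "Ready", "status": "True",
					"lastTransitionTime": "2025-12-01T06:51:07Z"},
			},
		},
	}
	out, _ := json.Marshal(patch)
	fmt.Println(string(out))
}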
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.332882 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.347537 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.362188 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.376279 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.389152 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.389236 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.389257 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.389286 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.389306 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.395806 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.410113 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.434103 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.457664 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fbc1332c2432e577fc6dc454bb42cfb697ad5b4762c3146830eb31dada5abe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:15Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 06:51:14.802509 6119 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 06:51:14.802776 6119 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 06:51:14.803250 6119 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 06:51:14.803588 6119 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:14.803659 6119 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:14.803717 6119 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:14.803723 6119 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:51:14.803789 6119 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:14.803898 6119 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:14.803906 6119 factory.go:656] Stopping watch factory\\\\nI1201 06:51:14.803944 6119 ovnkube.go:599] Stopped ovnkube\\\\nI1201 06:51:14.803982 6119 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuse
s\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.492746 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.492793 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.492808 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.492826 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.492838 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.596845 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.596928 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.596945 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.596970 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.596987 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.699871 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.699948 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.699971 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.699999 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.700023 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.736345 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.736392 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.736405 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.736421 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.736434 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: E1201 06:51:16.752413 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.757491 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.757578 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.757598 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.757630 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.757695 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: E1201 06:51:16.774245 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.780529 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.780650 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.780676 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.780705 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.780725 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: E1201 06:51:16.802655 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.808504 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.808592 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.808610 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.808634 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.808656 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: E1201 06:51:16.825970 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.830666 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.830711 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.830728 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.830748 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.830766 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: E1201 06:51:16.847529 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:16Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:16 crc kubenswrapper[4822]: E1201 06:51:16.847716 4822 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.850067 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.850139 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.850164 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.850197 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.850222 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.950622 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:16 crc kubenswrapper[4822]: E1201 06:51:16.950904 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.953629 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.953700 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.953728 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.953795 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:16 crc kubenswrapper[4822]: I1201 06:51:16.953820 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:16Z","lastTransitionTime":"2025-12-01T06:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.056621 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.056699 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.056718 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.056748 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.056767 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:17Z","lastTransitionTime":"2025-12-01T06:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.159729 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.159779 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.159791 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.159807 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.159820 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:17Z","lastTransitionTime":"2025-12-01T06:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.203246 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/1.log"
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.204296 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/0.log"
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.208222 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44" exitCode=1
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.208288 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44"}
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.208370 4822 scope.go:117] "RemoveContainer" containerID="b0fbc1332c2432e577fc6dc454bb42cfb697ad5b4762c3146830eb31dada5abe"
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.209581 4822 scope.go:117] "RemoveContainer" containerID="d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44"
Dec 01 06:51:17 crc kubenswrapper[4822]: E1201 06:51:17.209918 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\"" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee"
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.231509 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z"
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.250987 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z"
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.263341 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.263401 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.263420 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.263452 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.263472 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:17Z","lastTransitionTime":"2025-12-01T06:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.275336 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z"
Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.308718 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01
ee373b12f430dfe53dd8ea44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0fbc1332c2432e577fc6dc454bb42cfb697ad5b4762c3146830eb31dada5abe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:15Z\\\",\\\"message\\\":\\\"or.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 06:51:14.802509 6119 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 06:51:14.802776 6119 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 06:51:14.803250 6119 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 06:51:14.803588 6119 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:14.803659 6119 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:14.803717 6119 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:14.803723 6119 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:51:14.803789 6119 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:14.803898 6119 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:14.803906 6119 factory.go:656] Stopping watch factory\\\\nI1201 06:51:14.803944 6119 ovnkube.go:599] Stopped ovnkube\\\\nI1201 06:51:14.803982 6119 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\" 6241 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 06:51:16.062558 6241 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:16.062577 6241 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:16.062595 6241 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:16.062618 6241 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:16.062623 6241 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:16.062626 6241 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:16.062649 6241 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:16.062656 6241 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:16.062663 6241 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:16.062668 6241 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:16.062657 6241 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:16.062676 6241 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:51:16.062688 6241 handler.go:208] Removed 
*v1.NetworkPolicy event handler 4\\\\nI1201 06:51:16.062701 6241 factory.go:656] Stopping watch factory\\\\nI1201 06:51:16.062715 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\
\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.327268 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.343485 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.360516 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.366181 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.366265 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.366324 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.366347 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.366392 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:17Z","lastTransitionTime":"2025-12-01T06:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.373321 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.387205 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.403893 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.415453 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.426490 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.440243 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.451500 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.469309 4822 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.469373 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.469389 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.469418 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.469439 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:17Z","lastTransitionTime":"2025-12-01T06:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.572751 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.572813 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.572831 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.572855 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.572872 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:17Z","lastTransitionTime":"2025-12-01T06:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.675732 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.675775 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.675785 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.675809 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.675822 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:17Z","lastTransitionTime":"2025-12-01T06:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.778577 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.778830 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.778962 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.779050 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.779129 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:17Z","lastTransitionTime":"2025-12-01T06:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.882541 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.882604 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.882618 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.882635 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.882647 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:17Z","lastTransitionTime":"2025-12-01T06:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.950329 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:17 crc kubenswrapper[4822]: E1201 06:51:17.950472 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.950329 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:17 crc kubenswrapper[4822]: E1201 06:51:17.950611 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.985701 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.985778 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.985790 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.985808 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:17 crc kubenswrapper[4822]: I1201 06:51:17.985846 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:17Z","lastTransitionTime":"2025-12-01T06:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.089248 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.089295 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.089307 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.089332 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.089345 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:18Z","lastTransitionTime":"2025-12-01T06:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.192447 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.192502 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.192513 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.192531 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.192545 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:18Z","lastTransitionTime":"2025-12-01T06:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.214026 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/1.log" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.218044 4822 scope.go:117] "RemoveContainer" containerID="d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44" Dec 01 06:51:18 crc kubenswrapper[4822]: E1201 06:51:18.218226 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\"" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.234710 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.255522 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.274882 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.295401 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.295458 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.295470 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.295490 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.295502 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:18Z","lastTransitionTime":"2025-12-01T06:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.299925 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.330993 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01
ee373b12f430dfe53dd8ea44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\" 6241 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 06:51:16.062558 6241 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:16.062577 6241 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:16.062595 6241 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:16.062618 6241 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:16.062623 6241 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:16.062626 6241 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:16.062649 6241 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:16.062656 6241 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:16.062663 6241 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:16.062668 6241 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:16.062657 6241 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:16.062676 6241 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:51:16.062688 6241 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:16.062701 6241 factory.go:656] Stopping watch factory\\\\nI1201 06:51:16.062715 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.354480 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.375152 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.393737 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.398044 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.398102 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.398116 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.398139 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.398153 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:18Z","lastTransitionTime":"2025-12-01T06:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.412782 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.427707 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.449052 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.465954 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.475067 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777"] Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.475903 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.479587 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.480412 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.483468 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.500061 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.501899 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:18 crc kubenswrapper[4822]: 
I1201 06:51:18.501967 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.501986 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.502015 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.502033 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:18Z","lastTransitionTime":"2025-12-01T06:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.520456 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.535681 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.548773 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.563490 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.568308 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f02cc583-763a-4279-84fd-2d6b561fb11a-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-cp777\" (UID: \"f02cc583-763a-4279-84fd-2d6b561fb11a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.568459 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtgpr\" (UniqueName: \"kubernetes.io/projected/f02cc583-763a-4279-84fd-2d6b561fb11a-kube-api-access-dtgpr\") pod \"ovnkube-control-plane-749d76644c-cp777\" (UID: \"f02cc583-763a-4279-84fd-2d6b561fb11a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.568619 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f02cc583-763a-4279-84fd-2d6b561fb11a-env-overrides\") pod \"ovnkube-control-plane-749d76644c-cp777\" (UID: \"f02cc583-763a-4279-84fd-2d6b561fb11a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.568698 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f02cc583-763a-4279-84fd-2d6b561fb11a-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-cp777\" (UID: \"f02cc583-763a-4279-84fd-2d6b561fb11a\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.578993 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.595356 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.605173 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.605273 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.605300 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.605331 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.605356 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:18Z","lastTransitionTime":"2025-12-01T06:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.621319 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f4
2095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.645480 4822 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\" 6241 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 06:51:16.062558 6241 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:16.062577 6241 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:16.062595 6241 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:16.062618 6241 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:16.062623 6241 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:16.062626 6241 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:16.062649 6241 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:16.062656 6241 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:16.062663 6241 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:16.062668 6241 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:16.062657 6241 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:16.062676 6241 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:51:16.062688 6241 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:16.062701 6241 factory.go:656] Stopping watch factory\\\\nI1201 06:51:16.062715 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.667738 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.670412 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f02cc583-763a-4279-84fd-2d6b561fb11a-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-cp777\" (UID: \"f02cc583-763a-4279-84fd-2d6b561fb11a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.670479 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtgpr\" (UniqueName: \"kubernetes.io/projected/f02cc583-763a-4279-84fd-2d6b561fb11a-kube-api-access-dtgpr\") pod \"ovnkube-control-plane-749d76644c-cp777\" (UID: \"f02cc583-763a-4279-84fd-2d6b561fb11a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.670745 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f02cc583-763a-4279-84fd-2d6b561fb11a-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-cp777\" (UID: \"f02cc583-763a-4279-84fd-2d6b561fb11a\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.671987 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f02cc583-763a-4279-84fd-2d6b561fb11a-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-cp777\" (UID: \"f02cc583-763a-4279-84fd-2d6b561fb11a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.672190 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f02cc583-763a-4279-84fd-2d6b561fb11a-env-overrides\") pod \"ovnkube-control-plane-749d76644c-cp777\" (UID: \"f02cc583-763a-4279-84fd-2d6b561fb11a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.672905 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f02cc583-763a-4279-84fd-2d6b561fb11a-env-overrides\") pod \"ovnkube-control-plane-749d76644c-cp777\" (UID: \"f02cc583-763a-4279-84fd-2d6b561fb11a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.682298 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f02cc583-763a-4279-84fd-2d6b561fb11a-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-cp777\" (UID: \"f02cc583-763a-4279-84fd-2d6b561fb11a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.693693 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.704021 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtgpr\" (UniqueName: \"kubernetes.io/projected/f02cc583-763a-4279-84fd-2d6b561fb11a-kube-api-access-dtgpr\") pod \"ovnkube-control-plane-749d76644c-cp777\" (UID: \"f02cc583-763a-4279-84fd-2d6b561fb11a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.709103 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.709238 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.709270 4822 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.709311 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.709333 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:18Z","lastTransitionTime":"2025-12-01T06:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.717027 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf3
7060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.741487 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.765256 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.787246 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.794419 4822 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.812758 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.812798 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.812807 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.812828 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.812839 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:18Z","lastTransitionTime":"2025-12-01T06:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.814744 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"mu
ltus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:18 crc kubenswrapper[4822]: W1201 06:51:18.818091 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf02cc583_763a_4279_84fd_2d6b561fb11a.slice/crio-45cadfa4ecaeab8c04fa4e62c3cdbc68ee712c2eec5bff369e164adb46c1c0df WatchSource:0}: Error finding container 45cadfa4ecaeab8c04fa4e62c3cdbc68ee712c2eec5bff369e164adb46c1c0df: Status 404 returned error can't find the container with id 45cadfa4ecaeab8c04fa4e62c3cdbc68ee712c2eec5bff369e164adb46c1c0df Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.920030 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.920313 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.920325 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.920342 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.920355 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:18Z","lastTransitionTime":"2025-12-01T06:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:18 crc kubenswrapper[4822]: I1201 06:51:18.950265 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:18 crc kubenswrapper[4822]: E1201 06:51:18.950457 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.023260 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.023335 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.023352 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.023380 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.023398 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:19Z","lastTransitionTime":"2025-12-01T06:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.127782 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.127856 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.127874 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.127899 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.127917 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:19Z","lastTransitionTime":"2025-12-01T06:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.224313 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" event={"ID":"f02cc583-763a-4279-84fd-2d6b561fb11a","Type":"ContainerStarted","Data":"45cadfa4ecaeab8c04fa4e62c3cdbc68ee712c2eec5bff369e164adb46c1c0df"} Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.230797 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.230902 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.230925 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.230957 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.230980 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:19Z","lastTransitionTime":"2025-12-01T06:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.334957 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.335026 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.335045 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.335073 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.335092 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:19Z","lastTransitionTime":"2025-12-01T06:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.438780 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.438835 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.438849 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.438871 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.438883 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:19Z","lastTransitionTime":"2025-12-01T06:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.541822 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.541858 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.541868 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.541882 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.541892 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:19Z","lastTransitionTime":"2025-12-01T06:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.600864 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-lk8mq"] Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.601342 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.601415 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.621616 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.642214 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.644538 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.644597 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.644611 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.644631 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.644643 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:19Z","lastTransitionTime":"2025-12-01T06:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.656649 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.677469 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.685889 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " 
pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.685926 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jql47\" (UniqueName: \"kubernetes.io/projected/80225810-9d72-45b0-980f-1cb242d987e8-kube-api-access-jql47\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.703600 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"moun
tPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\" 6241 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 06:51:16.062558 6241 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:16.062577 6241 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:16.062595 6241 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:16.062618 6241 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:16.062623 6241 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:16.062626 6241 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:16.062649 6241 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:16.062656 6241 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:16.062663 6241 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:16.062668 6241 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:16.062657 6241 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:16.062676 6241 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:51:16.062688 6241 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:16.062701 6241 factory.go:656] 
Stopping watch factory\\\\nI1201 06:51:16.062715 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.724851 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.747027 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.747073 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.747087 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.747132 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.747151 4822 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:19Z","lastTransitionTime":"2025-12-01T06:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.748466 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.775728 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z"
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.787316 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.787471 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.787509 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:51:35.787479747 +0000 UTC m=+51.108287433 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.787574 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.787615 4822 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.787679 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:35.787662573 +0000 UTC m=+51.108470299 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.787616 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.787701 4822 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.787767 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jql47\" (UniqueName: \"kubernetes.io/projected/80225810-9d72-45b0-980f-1cb242d987e8-kube-api-access-jql47\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.787838 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs podName:80225810-9d72-45b0-980f-1cb242d987e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:20.287809267 +0000 UTC m=+35.608616953 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs") pod "network-metrics-daemon-lk8mq" (UID: "80225810-9d72-45b0-980f-1cb242d987e8") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.787754 4822 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.787879 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:35.787870638 +0000 UTC m=+51.108678324 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.794865 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.808036 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.815706 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jql47\" (UniqueName: \"kubernetes.io/projected/80225810-9d72-45b0-980f-1cb242d987e8-kube-api-access-jql47\") pod 
\"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.823155 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.837485 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.849645 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.849720 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.849744 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.849774 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.849797 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:19Z","lastTransitionTime":"2025-12-01T06:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.850766 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.864468 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.877124 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.886244 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.888894 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.888948 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.889090 
4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.889128 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.889144 4822 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.889195 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:35.889178749 +0000 UTC m=+51.209986435 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.889093 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.889229 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.889246 4822 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.889305 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:35.889288362 +0000 UTC m=+51.210096048 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.950741 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.950786 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.950868 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:51:19 crc kubenswrapper[4822]: E1201 06:51:19.951062 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.952812 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.952860 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.952871 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.952889 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:19 crc kubenswrapper[4822]: I1201 06:51:19.952902 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:19Z","lastTransitionTime":"2025-12-01T06:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.056201 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.056265 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.056286 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.056314 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.056333 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:20Z","lastTransitionTime":"2025-12-01T06:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.159846 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.159912 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.159931 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.159955 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.159973 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:20Z","lastTransitionTime":"2025-12-01T06:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.231580 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" event={"ID":"f02cc583-763a-4279-84fd-2d6b561fb11a","Type":"ContainerStarted","Data":"feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84"}
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.231648 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" event={"ID":"f02cc583-763a-4279-84fd-2d6b561fb11a","Type":"ContainerStarted","Data":"ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14"}
Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.249745 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.263126 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.263174 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.263192 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.263215 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.263232 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:20Z","lastTransitionTime":"2025-12-01T06:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.274784 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f4
2095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.293895 4822 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:20 crc kubenswrapper[4822]: E1201 06:51:20.294171 4822 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:51:20 crc kubenswrapper[4822]: E1201 06:51:20.294297 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs podName:80225810-9d72-45b0-980f-1cb242d987e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:21.294269167 +0000 UTC m=+36.615076883 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs") pod "network-metrics-daemon-lk8mq" (UID: "80225810-9d72-45b0-980f-1cb242d987e8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.312297 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01
ee373b12f430dfe53dd8ea44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\" 6241 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 06:51:16.062558 6241 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:16.062577 6241 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:16.062595 6241 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:16.062618 6241 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:16.062623 6241 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:16.062626 6241 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:16.062649 6241 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:16.062656 6241 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:16.062663 6241 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:16.062668 6241 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:16.062657 6241 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:16.062676 6241 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:51:16.062688 6241 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:16.062701 6241 factory.go:656] Stopping watch factory\\\\nI1201 06:51:16.062715 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.333965 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.354400 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.367065 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.367120 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.367138 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.367185 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.367202 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:20Z","lastTransitionTime":"2025-12-01T06:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.372969 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.393699 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.409151 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.428030 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.445884 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.470035 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.470106 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.470131 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.470166 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.470190 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:20Z","lastTransitionTime":"2025-12-01T06:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.470838 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.490948 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.508250 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.529855 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.544647 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.556386 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.573724 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.573801 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.573826 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.573855 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.573877 4822 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:20Z","lastTransitionTime":"2025-12-01T06:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.676836 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.676898 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.676917 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.676942 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.676966 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:20Z","lastTransitionTime":"2025-12-01T06:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.781676 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.781776 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.781803 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.781839 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.781875 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:20Z","lastTransitionTime":"2025-12-01T06:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.885460 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.885514 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.885532 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.885586 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.885605 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:20Z","lastTransitionTime":"2025-12-01T06:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.950488 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.950591 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:20 crc kubenswrapper[4822]: E1201 06:51:20.950697 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:20 crc kubenswrapper[4822]: E1201 06:51:20.950839 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.988031 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.988088 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.988106 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.988131 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:20 crc kubenswrapper[4822]: I1201 06:51:20.988149 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:20Z","lastTransitionTime":"2025-12-01T06:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.091599 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.091649 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.091666 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.091690 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.091706 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:21Z","lastTransitionTime":"2025-12-01T06:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.194744 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.194789 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.194803 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.194819 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.194832 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:21Z","lastTransitionTime":"2025-12-01T06:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.297303 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.297458 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.297479 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.297504 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.297520 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:21Z","lastTransitionTime":"2025-12-01T06:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.304086 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:21 crc kubenswrapper[4822]: E1201 06:51:21.304258 4822 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:51:21 crc kubenswrapper[4822]: E1201 06:51:21.304321 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs podName:80225810-9d72-45b0-980f-1cb242d987e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:23.304300469 +0000 UTC m=+38.625108195 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs") pod "network-metrics-daemon-lk8mq" (UID: "80225810-9d72-45b0-980f-1cb242d987e8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.401102 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.401178 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.401200 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.401225 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.401242 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:21Z","lastTransitionTime":"2025-12-01T06:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.504952 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.505016 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.505036 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.505064 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.505081 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:21Z","lastTransitionTime":"2025-12-01T06:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.607762 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.607809 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.607827 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.607843 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.607857 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:21Z","lastTransitionTime":"2025-12-01T06:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.710638 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.710709 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.710728 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.710755 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.710774 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:21Z","lastTransitionTime":"2025-12-01T06:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.814535 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.814610 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.814623 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.814643 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.814656 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:21Z","lastTransitionTime":"2025-12-01T06:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.917925 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.917970 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.917980 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.917998 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.918008 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:21Z","lastTransitionTime":"2025-12-01T06:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.950546 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:21 crc kubenswrapper[4822]: E1201 06:51:21.950762 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:21 crc kubenswrapper[4822]: I1201 06:51:21.950535 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:21 crc kubenswrapper[4822]: E1201 06:51:21.950985 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.022793 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.022848 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.022864 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.022891 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.022910 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:22Z","lastTransitionTime":"2025-12-01T06:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.125870 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.125916 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.125928 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.125946 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.125958 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:22Z","lastTransitionTime":"2025-12-01T06:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.142284 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.162862 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.179855 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.193266 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.211368 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.228952 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.228994 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:22 crc 
kubenswrapper[4822]: I1201 06:51:22.229005 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.229025 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.229040 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:22Z","lastTransitionTime":"2025-12-01T06:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.232820 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01
ee373b12f430dfe53dd8ea44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\" 6241 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 06:51:16.062558 6241 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:16.062577 6241 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:16.062595 6241 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:16.062618 6241 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:16.062623 6241 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:16.062626 6241 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:16.062649 6241 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:16.062656 6241 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:16.062663 6241 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:16.062668 6241 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:16.062657 6241 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:16.062676 6241 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:51:16.062688 6241 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:16.062701 6241 factory.go:656] Stopping watch factory\\\\nI1201 06:51:16.062715 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.252417 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.268117 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.281444 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.296649 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.309912 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.324286 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.332135 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.332180 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.332190 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.332221 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.332239 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:22Z","lastTransitionTime":"2025-12-01T06:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.336691 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.351175 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.365395 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.381791 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.396634 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:22Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.434654 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.434729 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.434751 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.434779 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.434799 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:22Z","lastTransitionTime":"2025-12-01T06:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.538823 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.538900 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.538911 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.538931 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.538943 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:22Z","lastTransitionTime":"2025-12-01T06:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.642493 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.642593 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.642608 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.642632 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.642647 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:22Z","lastTransitionTime":"2025-12-01T06:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.745797 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.745873 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.745897 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.745925 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.745943 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:22Z","lastTransitionTime":"2025-12-01T06:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.848884 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.848933 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.848947 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.848967 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.848978 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:22Z","lastTransitionTime":"2025-12-01T06:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.950179 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.950252 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:22 crc kubenswrapper[4822]: E1201 06:51:22.950417 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:22 crc kubenswrapper[4822]: E1201 06:51:22.950538 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.952206 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.952239 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.952249 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.952262 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:22 crc kubenswrapper[4822]: I1201 06:51:22.952273 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:22Z","lastTransitionTime":"2025-12-01T06:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.055301 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.055368 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.055387 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.055412 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.055429 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:23Z","lastTransitionTime":"2025-12-01T06:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.159850 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.160314 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.160405 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.160568 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.160693 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:23Z","lastTransitionTime":"2025-12-01T06:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.263796 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.263845 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.263858 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.263882 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.263895 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:23Z","lastTransitionTime":"2025-12-01T06:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.327307 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:23 crc kubenswrapper[4822]: E1201 06:51:23.327673 4822 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:51:23 crc kubenswrapper[4822]: E1201 06:51:23.328430 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs podName:80225810-9d72-45b0-980f-1cb242d987e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:27.328403347 +0000 UTC m=+42.649211033 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs") pod "network-metrics-daemon-lk8mq" (UID: "80225810-9d72-45b0-980f-1cb242d987e8") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.366823 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.366907 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.366926 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.366955 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.366976 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:23Z","lastTransitionTime":"2025-12-01T06:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... the preceding five records (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, and the "Node became not ready" condition) repeat unchanged except for timestamps at 06:51:23.470, .573, .677, .780, and .884 ...]
Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.950736 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:51:23 crc kubenswrapper[4822]: I1201 06:51:23.950735 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:51:23 crc kubenswrapper[4822]: E1201 06:51:23.950994 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:51:23 crc kubenswrapper[4822]: E1201 06:51:23.951115 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[... the same five node-status records repeat unchanged except for timestamps at 06:51:23.987 and then roughly every 100 ms from 06:51:24.090 through 06:51:24.917 ...]
Dec 01 06:51:24 crc kubenswrapper[4822]: I1201 06:51:24.950458 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:51:24 crc kubenswrapper[4822]: I1201 06:51:24.950497 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:51:24 crc kubenswrapper[4822]: E1201 06:51:24.950705 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8"
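The burst of NetworkReady=false records above has a single cause, named in every message: there is not yet any CNI network configuration under /etc/kubernetes/cni/net.d/, so the kubelet keeps the node NotReady and refuses to create sandboxes for the queued pods. The Go sketch below is a minimal illustration of what that readiness probe amounts to; it is not the kubelet's actual code (the real check happens inside the CRI runtime via libcni), and the accepted file extensions are an assumption.

// cnicheck.go -- illustrative sketch only: approximates the test behind
// "no CNI configuration file in /etc/kubernetes/cni/net.d/".
package main

import (
    "fmt"
    "os"
    "path/filepath"
)

func main() {
    // Directory named in the kubelet error message on this node.
    confDir := "/etc/kubernetes/cni/net.d"
    // The extensions here are an assumption (libcni accepts .conf,
    // .conflist, and .json); the real probe lives in the CRI runtime.
    var confs []string
    for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
        matches, err := filepath.Glob(filepath.Join(confDir, pat))
        if err != nil {
            continue // Glob only errors on a malformed pattern
        }
        confs = append(confs, matches...)
    }
    if len(confs) == 0 {
        fmt.Println("NetworkReady=false: no CNI configuration file in", confDir)
        os.Exit(1)
    }
    fmt.Println("CNI configuration present:", confs)
}

Once a network provider (here Multus/OVN-Kubernetes) writes a configuration file into that directory, the Ready condition flips and the "No sandbox for pod can be found" pods can finally be started.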
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:24 crc kubenswrapper[4822]: E1201 06:51:24.951119 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:24 crc kubenswrapper[4822]: I1201 06:51:24.973002 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:24Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:24 crc kubenswrapper[4822]: I1201 06:51:24.995934 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:24Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.015007 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.020737 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.020870 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.020957 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.021062 
4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.021156 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:25Z","lastTransitionTime":"2025-12-01T06:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.047798 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.066423 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2a
f0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.088925 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserv
er-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" 
feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.111167 4822 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1cce
e014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.124163 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.124231 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.124256 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.124289 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.124312 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:25Z","lastTransitionTime":"2025-12-01T06:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.128219 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.152894 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.174614 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.191881 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.205650 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.227396 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.227435 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.227447 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.227464 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.227476 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:25Z","lastTransitionTime":"2025-12-01T06:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.227848 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f4
2095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.256319 4822 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\" 6241 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 06:51:16.062558 6241 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:16.062577 6241 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:16.062595 6241 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:16.062618 6241 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:16.062623 6241 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:16.062626 6241 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:16.062649 6241 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:16.062656 6241 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:16.062663 6241 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:16.062668 6241 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:16.062657 6241 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:16.062676 6241 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:51:16.062688 6241 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:16.062701 6241 factory.go:656] Stopping watch factory\\\\nI1201 06:51:16.062715 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.273326 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.291141 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:25Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.330584 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.330659 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.330677 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.331091 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.331149 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:25Z","lastTransitionTime":"2025-12-01T06:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.434795 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.434863 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.434880 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.434905 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.434963 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:25Z","lastTransitionTime":"2025-12-01T06:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.538145 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.538236 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.538261 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.538287 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.538308 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:25Z","lastTransitionTime":"2025-12-01T06:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.641646 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.641713 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.641729 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.641759 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.641779 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:25Z","lastTransitionTime":"2025-12-01T06:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.745524 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.745644 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.745668 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.745697 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.745717 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:25Z","lastTransitionTime":"2025-12-01T06:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.849062 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.849127 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.849151 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.849182 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.849204 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:25Z","lastTransitionTime":"2025-12-01T06:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.949787 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.949787 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:25 crc kubenswrapper[4822]: E1201 06:51:25.950282 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:25 crc kubenswrapper[4822]: E1201 06:51:25.950374 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.952339 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.952388 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.952404 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.952428 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:25 crc kubenswrapper[4822]: I1201 06:51:25.952452 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:25Z","lastTransitionTime":"2025-12-01T06:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.056340 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.056407 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.056427 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.056452 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.056473 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:26Z","lastTransitionTime":"2025-12-01T06:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.159621 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.159684 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.159701 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.159734 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.159752 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:26Z","lastTransitionTime":"2025-12-01T06:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.262189 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.262256 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.262275 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.262303 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.262324 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:26Z","lastTransitionTime":"2025-12-01T06:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.365088 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.365162 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.365188 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.365217 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.365241 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:26Z","lastTransitionTime":"2025-12-01T06:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.470095 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.470905 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.470966 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.470995 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.471024 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:26Z","lastTransitionTime":"2025-12-01T06:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.574201 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.574267 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.574286 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.574317 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.574364 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:26Z","lastTransitionTime":"2025-12-01T06:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.678072 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.678157 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.678185 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.678217 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.678239 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:26Z","lastTransitionTime":"2025-12-01T06:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.780883 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.781035 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.781053 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.781269 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.781294 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:26Z","lastTransitionTime":"2025-12-01T06:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.884371 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.884417 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.884430 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.884449 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.884462 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:26Z","lastTransitionTime":"2025-12-01T06:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.949958 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.950078 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:51:26 crc kubenswrapper[4822]: E1201 06:51:26.950185 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:51:26 crc kubenswrapper[4822]: E1201 06:51:26.950386 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.987750 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.987832 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.987858 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.987884 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:26 crc kubenswrapper[4822]: I1201 06:51:26.987903 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:26Z","lastTransitionTime":"2025-12-01T06:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.090835 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.090888 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.090904 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.090926 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.090941 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.098978 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.099029 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.099041 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.099059 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.099073 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:27 crc kubenswrapper[4822]: E1201 06:51:27.112727 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.117324 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.117399 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.117421 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.117452 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.117472 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:27 crc kubenswrapper[4822]: E1201 06:51:27.132620 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.137123 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.137165 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
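The patch failure above does not come from the CNI condition itself: the status update dies at an admission webhook, and the final clause of the error is the actual diagnosis. The node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, while the node's clock reads 2025-12-01T06:51:27Z, so every attempt to persist node status fails TLS verification. One way to confirm the expiry independently of the kubelet (a sketch; it assumes the endpoint is reachable from the node and deliberately skips verification, since a verifying handshake would fail exactly as the kubelet's did):

    import socket
    import ssl

    HOST, PORT = "127.0.0.1", 9743  # webhook endpoint quoted in the error above

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False      # the cert is expired; fetch it anyway
    ctx.verify_mode = ssl.CERT_NONE

    with socket.create_connection((HOST, PORT), timeout=5) as sock:
        with ctx.wrap_socket(sock, server_hostname=HOST) as tls:
            der = tls.getpeercert(binary_form=True)  # raw DER, no validation

    # Print the PEM; feed it to any X.509 viewer (e.g. openssl x509 -noout -dates)
    # to read the notAfter field the kubelet error complains about.
    print(ssl.DER_cert_to_PEM_cert(der))

On CRC this pattern usually means the cluster's rotated certificates went stale while the VM was powered off; until the webhook's serving certificate is renewed, the kubelet cannot persist any node status, regardless of the CNI situation.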
event="NodeHasNoDiskPressure" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.137175 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.137194 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.137207 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:27 crc kubenswrapper[4822]: E1201 06:51:27.152949 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.157032 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.157114 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.157137 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.157165 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.157185 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:27 crc kubenswrapper[4822]: E1201 06:51:27.176701 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.182310 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.182350 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
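A failed node-status update is retried a fixed number of times per sync loop (nodeStatusUpdateRetry, five in current kubelets), which matches the burst of identical "Error updating node status, will retry" records in this capture: 06:51:27.112727, .132620, .152949, .176701 and .203280. A quick tally over a saved copy of this log (the file name is hypothetical) makes the pattern visible:

    import re
    from collections import Counter

    retry = re.compile(r'E\d{4} (\d{2}:\d{2}:\d{2})\.\d+ .* "Error updating node status, will retry"')

    counts = Counter()
    with open("kubelet.log", encoding="utf-8", errors="replace") as fh:
        for line in fh:
            m = retry.search(line)
            if m:
                counts[m.group(1)] += 1  # retries grouped by wall-clock second

    for second, n in sorted(counts.items()):
        print(second, n)

Five hits inside a single second would point at one exhausted retry loop rather than five independent sync failures.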
event="NodeHasNoDiskPressure" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.182360 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.182380 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.182393 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:27 crc kubenswrapper[4822]: E1201 06:51:27.203280 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:27 crc kubenswrapper[4822]: E1201 06:51:27.203405 4822 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.205020 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.205056 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.205069 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.205094 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.205112 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.308311 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.308377 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.308395 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.308423 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.308447 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.376717 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:27 crc kubenswrapper[4822]: E1201 06:51:27.376988 4822 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:51:27 crc kubenswrapper[4822]: E1201 06:51:27.377087 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs podName:80225810-9d72-45b0-980f-1cb242d987e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:35.377063093 +0000 UTC m=+50.697870819 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs") pod "network-metrics-daemon-lk8mq" (UID: "80225810-9d72-45b0-980f-1cb242d987e8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.411242 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.411284 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.411297 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.411315 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.411326 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.514484 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.514614 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.514651 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.514682 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.514704 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.617733 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.617866 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.617888 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.617910 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.617930 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.721287 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.721472 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.721498 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.721526 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.721590 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.824271 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.824361 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.824375 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.824396 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.824412 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.927363 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.927444 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.927467 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.927498 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.927519 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:27Z","lastTransitionTime":"2025-12-01T06:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.949853 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:27 crc kubenswrapper[4822]: I1201 06:51:27.949893 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:27 crc kubenswrapper[4822]: E1201 06:51:27.950012 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:27 crc kubenswrapper[4822]: E1201 06:51:27.950156 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.030982 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.031238 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.031397 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.031517 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.031667 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:28Z","lastTransitionTime":"2025-12-01T06:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.135356 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.135480 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.135501 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.135525 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.135543 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:28Z","lastTransitionTime":"2025-12-01T06:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.239504 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.239591 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.239605 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.239629 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.239646 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:28Z","lastTransitionTime":"2025-12-01T06:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.343346 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.343994 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.344214 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.344377 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.344504 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:28Z","lastTransitionTime":"2025-12-01T06:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.448518 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.448630 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.448658 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.448691 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.448714 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:28Z","lastTransitionTime":"2025-12-01T06:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.552252 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.552387 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.552407 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.552434 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.552451 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:28Z","lastTransitionTime":"2025-12-01T06:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.655525 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.655638 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.655663 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.655690 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.655713 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:28Z","lastTransitionTime":"2025-12-01T06:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.759476 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.759585 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.759605 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.759632 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.759657 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:28Z","lastTransitionTime":"2025-12-01T06:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.864713 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.864806 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.864834 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.864868 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.864896 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:28Z","lastTransitionTime":"2025-12-01T06:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.950114 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.950137 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:28 crc kubenswrapper[4822]: E1201 06:51:28.950457 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:28 crc kubenswrapper[4822]: E1201 06:51:28.950629 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.967165 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.967200 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.967211 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.967227 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:28 crc kubenswrapper[4822]: I1201 06:51:28.967241 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:28Z","lastTransitionTime":"2025-12-01T06:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.070136 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.070171 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.070348 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.070371 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.070382 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:29Z","lastTransitionTime":"2025-12-01T06:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.173838 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.173896 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.173912 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.173938 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.173957 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:29Z","lastTransitionTime":"2025-12-01T06:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.276938 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.277009 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.277032 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.277060 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.277086 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:29Z","lastTransitionTime":"2025-12-01T06:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.379650 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.379719 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.379759 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.379790 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.379813 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:29Z","lastTransitionTime":"2025-12-01T06:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.482963 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.483021 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.483044 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.483068 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.483088 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:29Z","lastTransitionTime":"2025-12-01T06:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.585678 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.585749 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.585765 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.585795 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.585812 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:29Z","lastTransitionTime":"2025-12-01T06:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.688693 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.688757 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.688774 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.688798 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.688815 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:29Z","lastTransitionTime":"2025-12-01T06:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.792123 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.792177 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.792193 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.792219 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.792237 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:29Z","lastTransitionTime":"2025-12-01T06:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.895259 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.895874 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.896433 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.896808 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.897143 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:29Z","lastTransitionTime":"2025-12-01T06:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.950017 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:29 crc kubenswrapper[4822]: I1201 06:51:29.950037 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:29 crc kubenswrapper[4822]: E1201 06:51:29.950762 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:29 crc kubenswrapper[4822]: E1201 06:51:29.950773 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.001513 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.001625 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.001654 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.001692 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.001716 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:30Z","lastTransitionTime":"2025-12-01T06:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.105324 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.105391 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.105415 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.105441 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.105457 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:30Z","lastTransitionTime":"2025-12-01T06:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.209290 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.209424 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.209444 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.209469 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.209492 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:30Z","lastTransitionTime":"2025-12-01T06:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.313097 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.313199 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.313283 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.313322 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.313347 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:30Z","lastTransitionTime":"2025-12-01T06:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.415894 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.415970 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.415980 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.416001 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.416011 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:30Z","lastTransitionTime":"2025-12-01T06:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.519785 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.519859 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.519878 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.519906 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.519929 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:30Z","lastTransitionTime":"2025-12-01T06:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.623797 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.623876 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.623897 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.623922 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.623945 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:30Z","lastTransitionTime":"2025-12-01T06:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.727029 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.727088 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.727102 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.727123 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.727141 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:30Z","lastTransitionTime":"2025-12-01T06:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.830449 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.830513 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.830538 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.830619 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.830641 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:30Z","lastTransitionTime":"2025-12-01T06:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.934438 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.934512 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.934534 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.934591 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.934614 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:30Z","lastTransitionTime":"2025-12-01T06:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.950845 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.950967 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:30 crc kubenswrapper[4822]: E1201 06:51:30.951207 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:30 crc kubenswrapper[4822]: E1201 06:51:30.951323 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:30 crc kubenswrapper[4822]: I1201 06:51:30.952304 4822 scope.go:117] "RemoveContainer" containerID="d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.038648 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.038687 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.038698 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.038714 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.038727 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:31Z","lastTransitionTime":"2025-12-01T06:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.141406 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.141476 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.141495 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.141521 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.141541 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:31Z","lastTransitionTime":"2025-12-01T06:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.244972 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.245029 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.245043 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.245066 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.245082 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:31Z","lastTransitionTime":"2025-12-01T06:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.274025 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/1.log" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.278349 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193"} Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.279049 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.294463 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.312679 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.326427 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.347944 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.348008 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.348026 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.348051 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.348070 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:31Z","lastTransitionTime":"2025-12-01T06:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.349411 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.375539 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a0
2094119e80cbf2767a45c193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\" 6241 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 06:51:16.062558 6241 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:16.062577 6241 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:16.062595 6241 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:16.062618 6241 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:16.062623 6241 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:16.062626 6241 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:16.062649 6241 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:16.062656 6241 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:16.062663 6241 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:16.062668 6241 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:16.062657 6241 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:16.062676 6241 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:51:16.062688 6241 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:16.062701 6241 factory.go:656] Stopping watch factory\\\\nI1201 06:51:16.062715 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.392075 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.413130 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.437068 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.451810 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.451872 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.451891 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.451919 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.451938 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:31Z","lastTransitionTime":"2025-12-01T06:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.458329 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.479326 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.497757 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.517123 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.537297 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.552866 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.554446 4822 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.554477 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.554486 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.554504 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.554515 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:31Z","lastTransitionTime":"2025-12-01T06:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.565816 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.589179 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.657444 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.657497 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.657508 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.657529 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.657597 4822 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:31Z","lastTransitionTime":"2025-12-01T06:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.760536 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.760609 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.760624 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.760647 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.760679 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:31Z","lastTransitionTime":"2025-12-01T06:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.863793 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.863885 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.863908 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.863958 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.863978 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:31Z","lastTransitionTime":"2025-12-01T06:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.949902 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.949963 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:31 crc kubenswrapper[4822]: E1201 06:51:31.950179 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:31 crc kubenswrapper[4822]: E1201 06:51:31.950338 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.967220 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.967301 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.967316 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.967342 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:31 crc kubenswrapper[4822]: I1201 06:51:31.967356 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:31Z","lastTransitionTime":"2025-12-01T06:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.070562 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.070621 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.070633 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.070655 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.070674 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:32Z","lastTransitionTime":"2025-12-01T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.174187 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.174259 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.174277 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.174309 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.174338 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:32Z","lastTransitionTime":"2025-12-01T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.277474 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.277579 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.277591 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.277614 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.277628 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:32Z","lastTransitionTime":"2025-12-01T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.282933 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/2.log" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.283855 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/1.log" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.287223 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193" exitCode=1 Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.287274 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193"} Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.287343 4822 scope.go:117] "RemoveContainer" containerID="d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.288293 4822 scope.go:117] "RemoveContainer" containerID="2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193" Dec 01 06:51:32 crc kubenswrapper[4822]: E1201 06:51:32.288585 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\"" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.316797 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a0
2094119e80cbf2767a45c193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d461e02cf10d4ed9178275673ff9222ae736fa01ee373b12f430dfe53dd8ea44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:16Z\\\",\\\"message\\\":\\\" 6241 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 06:51:16.062558 6241 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:16.062577 6241 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:16.062595 6241 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:16.062618 6241 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:16.062623 6241 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:16.062626 6241 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:16.062649 6241 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:16.062656 6241 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:16.062663 6241 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:16.062668 6241 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:16.062657 6241 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:16.062676 6241 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:51:16.062688 6241 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:16.062701 6241 factory.go:656] Stopping watch factory\\\\nI1201 06:51:16.062715 6241 ovnkube.go:599] Stopped ovnkube\\\\nI1201 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:32Z\\\",\\\"message\\\":\\\"/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 06:51:32.005099 6449 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:32.005113 6449 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:32.005134 6449 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:32.005158 6449 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:32.005174 6449 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:32.005166 6449 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:32.005198 6449 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:32.005212 6449 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:32.005211 6449 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:32.005180 6449 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:32.005245 6449 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:32.005277 6449 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:32.005355 6449 factory.go:656] Stopping watch 
factory\\\\nI1201 06:51:32.005385 6449 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:51:32.005407 6449 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o:/
/6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.338907 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.358213 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.375331 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.380993 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.381065 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.381085 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.381113 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.381133 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:32Z","lastTransitionTime":"2025-12-01T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.399757 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.416516 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.436021 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.453338 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.474979 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.483947 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.484006 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.484024 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.484049 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.484064 4822 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:32Z","lastTransitionTime":"2025-12-01T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.496325 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.518435 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.541125 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.560933 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.584901 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.586618 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.586697 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.586720 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.586751 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.586771 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:32Z","lastTransitionTime":"2025-12-01T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.605813 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.624285 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.690673 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.690726 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.690739 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.690760 4822 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.690775 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:32Z","lastTransitionTime":"2025-12-01T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.793914 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.793968 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.793982 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.794003 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.794018 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:32Z","lastTransitionTime":"2025-12-01T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.897126 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.897171 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.897181 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.897199 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.897211 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:32Z","lastTransitionTime":"2025-12-01T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.950399 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:32 crc kubenswrapper[4822]: I1201 06:51:32.950436 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:32 crc kubenswrapper[4822]: E1201 06:51:32.951148 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:32 crc kubenswrapper[4822]: E1201 06:51:32.951301 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.000492 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.000586 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.000609 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.000637 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.000659 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:33Z","lastTransitionTime":"2025-12-01T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.102996 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.103303 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.103387 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.103489 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.103596 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:33Z","lastTransitionTime":"2025-12-01T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.206714 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.206846 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.206867 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.207293 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.207512 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:33Z","lastTransitionTime":"2025-12-01T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.294119 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/2.log" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.300690 4822 scope.go:117] "RemoveContainer" containerID="2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193" Dec 01 06:51:33 crc kubenswrapper[4822]: E1201 06:51:33.300950 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\"" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.311298 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.311540 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.311806 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.312034 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.312242 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:33Z","lastTransitionTime":"2025-12-01T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.323968 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.347038 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.368349 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.389278 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.407201 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.416029 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.416092 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.416117 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.416147 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.416169 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:33Z","lastTransitionTime":"2025-12-01T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.428797 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.451713 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.471118 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.488069 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.510120 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.519337 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.519417 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.519442 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.519475 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.519497 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:33Z","lastTransitionTime":"2025-12-01T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.530758 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.551645 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.569001 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.593942 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.622062 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.622118 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:33 crc 
kubenswrapper[4822]: I1201 06:51:33.622137 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.622161 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.622178 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:33Z","lastTransitionTime":"2025-12-01T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.625571 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a0
2094119e80cbf2767a45c193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:32Z\\\",\\\"message\\\":\\\"/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 06:51:32.005099 6449 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:32.005113 6449 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:32.005134 6449 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:32.005158 6449 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:32.005174 6449 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:32.005166 6449 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:32.005198 6449 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:32.005212 6449 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:32.005211 6449 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:32.005180 6449 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:32.005245 6449 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:32.005277 6449 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:32.005355 6449 factory.go:656] Stopping watch factory\\\\nI1201 06:51:32.005385 6449 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:51:32.005407 6449 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.645488 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.725596 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.725669 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.725696 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.725728 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.725752 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:33Z","lastTransitionTime":"2025-12-01T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.828906 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.828981 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.829005 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.829028 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.829045 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:33Z","lastTransitionTime":"2025-12-01T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.931741 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.931810 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.931834 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.931862 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.931883 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:33Z","lastTransitionTime":"2025-12-01T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.950319 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:33 crc kubenswrapper[4822]: I1201 06:51:33.950367 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:33 crc kubenswrapper[4822]: E1201 06:51:33.950491 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:33 crc kubenswrapper[4822]: E1201 06:51:33.950690 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.035139 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.035227 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.035253 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.035287 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.035308 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:34Z","lastTransitionTime":"2025-12-01T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.138767 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.138848 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.138864 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.138884 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.138900 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:34Z","lastTransitionTime":"2025-12-01T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.241743 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.241834 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.241882 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.241914 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.241937 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:34Z","lastTransitionTime":"2025-12-01T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.345248 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.345324 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.345342 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.345367 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.345387 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:34Z","lastTransitionTime":"2025-12-01T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.448936 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.449044 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.449074 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.449109 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.449137 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:34Z","lastTransitionTime":"2025-12-01T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.552263 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.552336 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.552358 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.552387 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.552409 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:34Z","lastTransitionTime":"2025-12-01T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.655120 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.655192 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.655213 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.655237 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.655256 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:34Z","lastTransitionTime":"2025-12-01T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.759196 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.759266 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.759284 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.759314 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.759332 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:34Z","lastTransitionTime":"2025-12-01T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.860931 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.860977 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.860988 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.861002 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.861010 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:34Z","lastTransitionTime":"2025-12-01T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.950299 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:34 crc kubenswrapper[4822]: E1201 06:51:34.950513 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.950616 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:34 crc kubenswrapper[4822]: E1201 06:51:34.950945 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.964113 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.964185 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.964205 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.964229 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.964251 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:34Z","lastTransitionTime":"2025-12-01T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.973001 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.
11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:34 crc kubenswrapper[4822]: I1201 06:51:34.995214 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.015722 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.038945 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.059534 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.068029 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.068274 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.068310 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.068397 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.068508 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:35Z","lastTransitionTime":"2025-12-01T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.087025 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f4
2095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.121887 4822 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:32Z\\\",\\\"message\\\":\\\"/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 06:51:32.005099 6449 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:32.005113 6449 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:32.005134 6449 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:32.005158 6449 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:32.005174 6449 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:32.005166 6449 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:32.005198 6449 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:32.005212 6449 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:32.005211 6449 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:32.005180 6449 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:32.005245 6449 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:32.005277 6449 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:32.005355 6449 factory.go:656] Stopping watch factory\\\\nI1201 06:51:32.005385 6449 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:51:32.005407 6449 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.144286 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.167115 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.173121 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.173176 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.173196 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.173225 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.173245 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:35Z","lastTransitionTime":"2025-12-01T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.192109 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.211507 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.236605 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.253240 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.269961 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.277068 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.277122 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.277139 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.277166 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.277183 4822 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:35Z","lastTransitionTime":"2025-12-01T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.287120 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.301075 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.380017 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.380317 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.380581 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.380700 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.380818 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:35Z","lastTransitionTime":"2025-12-01T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.473233 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.473500 4822 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.473673 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs podName:80225810-9d72-45b0-980f-1cb242d987e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:51:51.473645049 +0000 UTC m=+66.794452775 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs") pod "network-metrics-daemon-lk8mq" (UID: "80225810-9d72-45b0-980f-1cb242d987e8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.484751 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.484812 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.484830 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.484859 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.484878 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:35Z","lastTransitionTime":"2025-12-01T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.588622 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.588708 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.588731 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.588764 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.588789 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:35Z","lastTransitionTime":"2025-12-01T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.691956 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.692044 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.692069 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.692123 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.692143 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:35Z","lastTransitionTime":"2025-12-01T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.795831 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.795926 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.795953 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.795986 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.796010 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:35Z","lastTransitionTime":"2025-12-01T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.879234 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.879321 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:52:07.879291713 +0000 UTC m=+83.200099399 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.879411 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.879474 4822 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.879516 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:52:07.879508719 +0000 UTC m=+83.200316405 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.879517 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.879681 4822 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.879745 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:52:07.879729116 +0000 UTC m=+83.200536832 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.900166 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.900234 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.900249 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.900272 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.900288 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:35Z","lastTransitionTime":"2025-12-01T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.950239 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.950353 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.950602 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.950721 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.981041 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:35 crc kubenswrapper[4822]: I1201 06:51:35.981110 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.981276 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.981303 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.981317 4822 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.981387 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:52:07.981370576 +0000 UTC m=+83.302178272 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.981425 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.981495 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.981523 4822 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:35 crc kubenswrapper[4822]: E1201 06:51:35.981686 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:52:07.981647013 +0000 UTC m=+83.302454879 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.003099 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.003146 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.003159 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.003176 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.003192 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:36Z","lastTransitionTime":"2025-12-01T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.107192 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.107238 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.107248 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.107266 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.107280 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:36Z","lastTransitionTime":"2025-12-01T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.211122 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.211192 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.211211 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.211238 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.211258 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:36Z","lastTransitionTime":"2025-12-01T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.314805 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.314901 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.314926 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.314957 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.314981 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:36Z","lastTransitionTime":"2025-12-01T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.418590 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.418675 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.418698 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.419089 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.419293 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:36Z","lastTransitionTime":"2025-12-01T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.522208 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.522256 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.522270 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.522293 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.522306 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:36Z","lastTransitionTime":"2025-12-01T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.625258 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.625300 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.625311 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.625328 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.625340 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:36Z","lastTransitionTime":"2025-12-01T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.728888 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.728947 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.728958 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.728978 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.728995 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:36Z","lastTransitionTime":"2025-12-01T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.832715 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.832766 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.832782 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.832805 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.832900 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:36Z","lastTransitionTime":"2025-12-01T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.937327 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.937413 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.937438 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.937471 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.937491 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:36Z","lastTransitionTime":"2025-12-01T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.950785 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:36 crc kubenswrapper[4822]: I1201 06:51:36.950879 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:36 crc kubenswrapper[4822]: E1201 06:51:36.950984 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:36 crc kubenswrapper[4822]: E1201 06:51:36.951081 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.040988 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.041053 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.041070 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.041095 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.041114 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.144622 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.144699 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.144728 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.144760 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.144784 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.248267 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.248325 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.248343 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.248366 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.248386 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.351717 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.351785 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.351851 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.351877 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.351894 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.382910 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.382976 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.382996 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.383019 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.383037 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: E1201 06:51:37.403281 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z"
Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.408467 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.408525 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
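The status patch itself is well formed; it is being rejected before it reaches storage because the API server must first consult the node.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743, and that webhook is presenting a serving certificate that expired on 2025-08-24, long before the node's current clock time of 2025-12-01. A quick way to confirm what the webhook endpoint is actually serving is to dial it and read the leaf certificate's validity window. The following is a minimal standalone sketch (not part of the kubelet), assuming the endpoint is reachable from the node:

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Dial the webhook endpoint. Verification is skipped on purpose:
	// we want to read the certificate even though it is expired.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial webhook: %v", err)
	}
	defer conn.Close()

	leaf := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:    %s\n", leaf.Subject)
	fmt.Printf("not before: %s\n", leaf.NotBefore.Format(time.RFC3339))
	fmt.Printf("not after:  %s\n", leaf.NotAfter.Format(time.RFC3339))
	if time.Now().After(leaf.NotAfter) {
		fmt.Println("certificate is EXPIRED, matching the x509 error in the log")
	}
}

On CRC this is a common symptom of starting a bundle whose baked-in certificates have aged out; the cluster normally rotates them shortly after startup, at which point these patches begin to succeed.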
event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.408543 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.408606 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.408628 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: E1201 06:51:37.428610 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.434244 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.434299 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.434316 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.434340 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.434359 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: E1201 06:51:37.455203 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.460375 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.460428 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.460444 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.460468 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.460485 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: E1201 06:51:37.479816 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.485158 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.485262 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.485280 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.485305 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.485328 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: E1201 06:51:37.505512 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: E1201 06:51:37.505787 4822 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.508371 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.508444 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.508468 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.508500 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.508523 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.538527 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.551798 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.562479 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.580826 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.596277 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.611985 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.612050 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.612072 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.612105 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.612126 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.628993 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a0
2094119e80cbf2767a45c193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:32Z\\\",\\\"message\\\":\\\"/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 06:51:32.005099 6449 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:32.005113 6449 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:32.005134 6449 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:32.005158 6449 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:32.005174 6449 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:32.005166 6449 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:32.005198 6449 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:32.005212 6449 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:32.005211 6449 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:32.005180 6449 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:32.005245 6449 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:32.005277 6449 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:32.005355 6449 factory.go:656] Stopping watch factory\\\\nI1201 06:51:32.005385 6449 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:51:32.005407 6449 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.648478 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.673600 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.690012 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.715115 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.715172 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.715191 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.715215 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.715235 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.714877 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.733455 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.755180 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.772378 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.794586 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.813519 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.818601 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.818658 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.818679 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.818703 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.818719 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.831479 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.851470 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.869814 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.922276 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.922331 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.922343 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.922364 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.922377 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:37Z","lastTransitionTime":"2025-12-01T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.950284 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:37 crc kubenswrapper[4822]: I1201 06:51:37.950345 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:37 crc kubenswrapper[4822]: E1201 06:51:37.950485 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:37 crc kubenswrapper[4822]: E1201 06:51:37.950753 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.025422 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.025480 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.025498 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.025522 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.025540 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:38Z","lastTransitionTime":"2025-12-01T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.129113 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.129173 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.129189 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.129211 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.129225 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:38Z","lastTransitionTime":"2025-12-01T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.232506 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.232570 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.232583 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.232606 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.232620 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:38Z","lastTransitionTime":"2025-12-01T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.335440 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.335532 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.335594 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.335628 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.335652 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:38Z","lastTransitionTime":"2025-12-01T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.438824 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.438884 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.438901 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.438930 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.438955 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:38Z","lastTransitionTime":"2025-12-01T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.542598 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.542676 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.542700 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.542727 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.542747 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:38Z","lastTransitionTime":"2025-12-01T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.646614 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.646684 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.646704 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.646735 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.646758 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:38Z","lastTransitionTime":"2025-12-01T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.750410 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.750462 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.750475 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.750493 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.750505 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:38Z","lastTransitionTime":"2025-12-01T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.853042 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.853081 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.853090 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.853121 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.853131 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:38Z","lastTransitionTime":"2025-12-01T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.950405 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.950515 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:38 crc kubenswrapper[4822]: E1201 06:51:38.950689 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:38 crc kubenswrapper[4822]: E1201 06:51:38.950779 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.957230 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.957271 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.957286 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.957311 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:38 crc kubenswrapper[4822]: I1201 06:51:38.957327 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:38Z","lastTransitionTime":"2025-12-01T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.061593 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.061656 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.061669 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.061692 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.061708 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:39Z","lastTransitionTime":"2025-12-01T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.165224 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.165294 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.165313 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.165341 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.165359 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:39Z","lastTransitionTime":"2025-12-01T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.269172 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.269241 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.269259 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.269290 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.269310 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:39Z","lastTransitionTime":"2025-12-01T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.372697 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.372788 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.372808 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.372839 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.372869 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:39Z","lastTransitionTime":"2025-12-01T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.477596 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.477673 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.477692 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.477724 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.477745 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:39Z","lastTransitionTime":"2025-12-01T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.580603 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.580677 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.580699 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.580729 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.580748 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:39Z","lastTransitionTime":"2025-12-01T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.684066 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.684124 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.684142 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.684166 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.684186 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:39Z","lastTransitionTime":"2025-12-01T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.788919 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.788996 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.789013 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.789043 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.789070 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:39Z","lastTransitionTime":"2025-12-01T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.892882 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.892947 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.892964 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.893010 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.893029 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:39Z","lastTransitionTime":"2025-12-01T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.950583 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.950583 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:39 crc kubenswrapper[4822]: E1201 06:51:39.950810 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:51:39 crc kubenswrapper[4822]: E1201 06:51:39.950911 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.996271 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.996353 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.996382 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.996413 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:39 crc kubenswrapper[4822]: I1201 06:51:39.996435 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:39Z","lastTransitionTime":"2025-12-01T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.103540 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.103646 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.103672 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.103704 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.103727 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:40Z","lastTransitionTime":"2025-12-01T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.207269 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.207338 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.207351 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.207376 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.207392 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:40Z","lastTransitionTime":"2025-12-01T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.310862 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.310932 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.310951 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.310979 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.310998 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:40Z","lastTransitionTime":"2025-12-01T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.415123 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.415198 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.415212 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.415237 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.415251 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:40Z","lastTransitionTime":"2025-12-01T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.518571 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.518655 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.518673 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.518704 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.518726 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:40Z","lastTransitionTime":"2025-12-01T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.621783 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.621858 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.621875 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.621902 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.621921 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:40Z","lastTransitionTime":"2025-12-01T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.724716 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.724750 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.724761 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.724775 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.724786 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:40Z","lastTransitionTime":"2025-12-01T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.828475 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.828532 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.828586 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.828623 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.828644 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:40Z","lastTransitionTime":"2025-12-01T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.931808 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.931883 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.931896 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.931914 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.931926 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:40Z","lastTransitionTime":"2025-12-01T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.950397 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:51:40 crc kubenswrapper[4822]: E1201 06:51:40.950535 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8"
Dec 01 06:51:40 crc kubenswrapper[4822]: I1201 06:51:40.950747 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:51:40 crc kubenswrapper[4822]: E1201 06:51:40.950992 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.035520 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.035660 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.035691 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.035723 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.035748 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:41Z","lastTransitionTime":"2025-12-01T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.139743 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.139810 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.139829 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.139863 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.139890 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:41Z","lastTransitionTime":"2025-12-01T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.243223 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.243330 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.243349 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.243371 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.243390 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:41Z","lastTransitionTime":"2025-12-01T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.346777 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.346872 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.346900 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.346931 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.346953 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:41Z","lastTransitionTime":"2025-12-01T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.449944 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.450008 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.450031 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.450060 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.450081 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:41Z","lastTransitionTime":"2025-12-01T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.553274 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.553343 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.553362 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.553386 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.553406 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:41Z","lastTransitionTime":"2025-12-01T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.656332 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.656384 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.656395 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.656416 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.656429 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:41Z","lastTransitionTime":"2025-12-01T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.758979 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.759055 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.759070 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.759096 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.759113 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:41Z","lastTransitionTime":"2025-12-01T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.861898 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.861987 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.862000 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.862016 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.862027 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:41Z","lastTransitionTime":"2025-12-01T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.950501 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.950543 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:51:41 crc kubenswrapper[4822]: E1201 06:51:41.950809 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:51:41 crc kubenswrapper[4822]: E1201 06:51:41.950859 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.965173 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.965256 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.965276 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.965305 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:41 crc kubenswrapper[4822]: I1201 06:51:41.965331 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:41Z","lastTransitionTime":"2025-12-01T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.068044 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.068085 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.068096 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.068112 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.068123 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:42Z","lastTransitionTime":"2025-12-01T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.171101 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.171144 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.171159 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.171174 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.171184 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:42Z","lastTransitionTime":"2025-12-01T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.274287 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.274381 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.274406 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.274437 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.274458 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:42Z","lastTransitionTime":"2025-12-01T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.377874 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.377962 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.377980 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.378173 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.378191 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:42Z","lastTransitionTime":"2025-12-01T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.481449 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.481511 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.481528 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.481578 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.481597 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:42Z","lastTransitionTime":"2025-12-01T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.584738 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.584813 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.584839 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.584867 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.584889 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:42Z","lastTransitionTime":"2025-12-01T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.688102 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.688262 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.688284 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.688311 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.688329 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:42Z","lastTransitionTime":"2025-12-01T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.791092 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.791171 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.791193 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.791219 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.791239 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:42Z","lastTransitionTime":"2025-12-01T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.895084 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.895182 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.895200 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.895223 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.895241 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:42Z","lastTransitionTime":"2025-12-01T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.950883 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.951013 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:51:42 crc kubenswrapper[4822]: E1201 06:51:42.951215 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:51:42 crc kubenswrapper[4822]: E1201 06:51:42.951599 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.998849 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.998911 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.998931 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.998960 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:42 crc kubenswrapper[4822]: I1201 06:51:42.998980 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:42Z","lastTransitionTime":"2025-12-01T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.102535 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.102647 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.102666 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.102690 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.102709 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:43Z","lastTransitionTime":"2025-12-01T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.205659 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.205714 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.205728 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.205750 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.205765 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:43Z","lastTransitionTime":"2025-12-01T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.309346 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.309421 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.309447 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.309485 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.309509 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:43Z","lastTransitionTime":"2025-12-01T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.412331 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.412392 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.412413 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.412437 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.412454 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:43Z","lastTransitionTime":"2025-12-01T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.515852 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.515919 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.515936 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.515965 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.515989 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:43Z","lastTransitionTime":"2025-12-01T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.620356 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.620433 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.620457 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.620490 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.620512 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:43Z","lastTransitionTime":"2025-12-01T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.723867 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.723949 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.723974 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.724004 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.724027 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:43Z","lastTransitionTime":"2025-12-01T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.827290 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.827363 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.827385 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.827413 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.827440 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:43Z","lastTransitionTime":"2025-12-01T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.930448 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.930516 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.930537 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.930595 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.930629 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:43Z","lastTransitionTime":"2025-12-01T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.949841 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:51:43 crc kubenswrapper[4822]: I1201 06:51:43.949848 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:51:43 crc kubenswrapper[4822]: E1201 06:51:43.950056 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:51:43 crc kubenswrapper[4822]: E1201 06:51:43.950272 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.034175 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.034292 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.034362 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.034397 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.034418 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:44Z","lastTransitionTime":"2025-12-01T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.137934 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.138015 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.138035 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.138062 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.138080 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:44Z","lastTransitionTime":"2025-12-01T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.240912 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.240994 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.241014 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.241043 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.241063 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:44Z","lastTransitionTime":"2025-12-01T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.344189 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.344272 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.344292 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.344317 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.344335 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:44Z","lastTransitionTime":"2025-12-01T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.452309 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.452383 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.452400 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.452423 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.452440 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:44Z","lastTransitionTime":"2025-12-01T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.555238 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.555282 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.555291 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.555306 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.555315 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:44Z","lastTransitionTime":"2025-12-01T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.658390 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.658467 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.658490 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.658520 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.658543 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:44Z","lastTransitionTime":"2025-12-01T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.761780 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.761854 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.761877 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.761907 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.761929 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:44Z","lastTransitionTime":"2025-12-01T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.865134 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.865186 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.865203 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.865228 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.865245 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:44Z","lastTransitionTime":"2025-12-01T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.950028 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.950107 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:51:44 crc kubenswrapper[4822]: E1201 06:51:44.950332 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8"
Dec 01 06:51:44 crc kubenswrapper[4822]: E1201 06:51:44.950587 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.969240 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.969344 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.969365 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.969391 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.969410 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:44Z","lastTransitionTime":"2025-12-01T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.971832 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c05c16e-eb24-4a18-b222-f4a26288f012\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://260a537446b704d0fdb98d6a6a707ffdfb8e067c0f8908efd517a182f4536fb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df4d34c9143b9197957269141bafa327acfff4c505d0a844387d36cda5812d91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9
380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9449923af3dd5340320e4d63aa4f7a4c1398411cb97f20b117bd5d90bef5f2f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:44 crc kubenswrapper[4822]: I1201 06:51:44.993748 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.016303 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.040312 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.067930 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.072864 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.072942 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.072960 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.072986 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.073004 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:45Z","lastTransitionTime":"2025-12-01T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.086751 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.106908 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.127443 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.143370 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.162726 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.176477 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.176565 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.176581 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.176607 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.176622 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:45Z","lastTransitionTime":"2025-12-01T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.186898 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.205615 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.225417 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.243333 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.263816 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.279949 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.279990 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:45 crc 
kubenswrapper[4822]: I1201 06:51:45.280003 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.280019 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.280034 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:45Z","lastTransitionTime":"2025-12-01T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.294925 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a0
2094119e80cbf2767a45c193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:32Z\\\",\\\"message\\\":\\\"/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 06:51:32.005099 6449 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:32.005113 6449 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:32.005134 6449 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:32.005158 6449 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:32.005174 6449 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:32.005166 6449 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:32.005198 6449 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:32.005212 6449 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:32.005211 6449 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:32.005180 6449 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:32.005245 6449 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:32.005277 6449 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:32.005355 6449 factory.go:656] Stopping watch factory\\\\nI1201 06:51:32.005385 6449 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:51:32.005407 6449 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.316247 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z"
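
[editor's note] The "Failed to update status for pod" entries above all fail the same way: the kubelet's POST to the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 is rejected during TLS verification because the webhook's serving certificate expired on 2025-08-24T17:21:41Z. A minimal Go sketch of the validity-window comparison that produces this x509 error follows; the certificate path is a hypothetical stand-in for wherever the webhook's serving certificate is mounted, and this is an illustrative check, not kubelet or webhook source.

// certcheck.go - minimal sketch: reproduce the NotBefore/NotAfter window test
// that fails above with "x509: certificate has expired or is not yet valid".
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical path; point this at the webhook's serving certificate.
	data, err := os.ReadFile("/etc/webhook/serving-cert/tls.crt")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	switch {
	case now.Before(cert.NotBefore):
		// The "not yet valid" half of the error string seen in the log.
		fmt.Printf("certificate not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	case now.After(cert.NotAfter):
		// Mirrors the log: "current time 2025-12-01T06:51:45Z is after 2025-08-24T17:21:41Z".
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	default:
		fmt.Printf("certificate valid until %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
	}
}

Until that certificate is rotated, the webhook rejects every status patch from this node, which is why the identical error recurs for each pod whose status the kubelet tries to update.
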
Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.382456 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.382504 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.382520 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.382544 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.382598 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:45Z","lastTransitionTime":"2025-12-01T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.486726 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.486781 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.486798 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.486871 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.486891 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:45Z","lastTransitionTime":"2025-12-01T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.590751 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.590819 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.590839 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.590865 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.590886 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:45Z","lastTransitionTime":"2025-12-01T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.694233 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.694292 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.694310 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.694337 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.694357 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:45Z","lastTransitionTime":"2025-12-01T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.797662 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.797718 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.797731 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.797750 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.797765 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:45Z","lastTransitionTime":"2025-12-01T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.901011 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.901056 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.901068 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.901092 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.901104 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:45Z","lastTransitionTime":"2025-12-01T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.950131 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:45 crc kubenswrapper[4822]: I1201 06:51:45.950185 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:45 crc kubenswrapper[4822]: E1201 06:51:45.950310 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:45 crc kubenswrapper[4822]: E1201 06:51:45.950411 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.004647 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.004703 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.004716 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.004737 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.004750 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:46Z","lastTransitionTime":"2025-12-01T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.108669 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.108727 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.108739 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.108764 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.108778 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:46Z","lastTransitionTime":"2025-12-01T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.211880 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.211949 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.211968 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.212006 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.212026 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:46Z","lastTransitionTime":"2025-12-01T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.315542 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.315637 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.315657 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.315680 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.315697 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:46Z","lastTransitionTime":"2025-12-01T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.418780 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.418867 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.418885 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.418913 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.418977 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:46Z","lastTransitionTime":"2025-12-01T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.523954 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.524012 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.524028 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.524053 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.524072 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:46Z","lastTransitionTime":"2025-12-01T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.628448 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.628538 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.628625 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.628656 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.628708 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:46Z","lastTransitionTime":"2025-12-01T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.731842 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.731932 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.731958 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.731988 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.732005 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:46Z","lastTransitionTime":"2025-12-01T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.835796 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.835861 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.835871 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.835893 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.835906 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:46Z","lastTransitionTime":"2025-12-01T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.939357 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.939428 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.939446 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.939470 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.939489 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:46Z","lastTransitionTime":"2025-12-01T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.949888 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.949931 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:46 crc kubenswrapper[4822]: E1201 06:51:46.950105 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8"
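
[editor's note] The "Error syncing pod" records above and the recurring NodeNotReady condition are gated on the same thing: the kubelet reports NetworkReady=false until a CNI network configuration appears in /etc/kubernetes/cni/net.d/, which on this node is waiting on the crash-looping ovnkube-controller. A minimal standalone sketch of that directory probe is below; it assumes the conventional CNI config extensions and is an illustration, not the kubelet's actual implementation.

// cnicheck.go - minimal sketch, not kubelet source: report whether the
// directory named in the log message contains a CNI network configuration.
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log message
	entries, err := os.ReadDir(confDir)
	if err != nil {
		log.Fatal(err)
	}
	var found []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		// CNI config loaders conventionally accept .conf, .conflist and .json.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file in", confDir, "- node stays NotReady")
		return
	}
	fmt.Println("CNI configuration present:", found)
}

Run on the node, this keeps printing the "no CNI configuration file" state seen above until the network plugin recovers and writes its config.
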
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:46 crc kubenswrapper[4822]: E1201 06:51:46.950849 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:46 crc kubenswrapper[4822]: I1201 06:51:46.952144 4822 scope.go:117] "RemoveContainer" containerID="2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193" Dec 01 06:51:46 crc kubenswrapper[4822]: E1201 06:51:46.952691 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\"" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.044234 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.044306 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.044326 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.044358 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.044381 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.149075 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.149190 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.149246 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.149270 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.149287 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.253995 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.254181 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.254203 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.254422 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.254444 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.358309 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.358372 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.358390 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.358415 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.358434 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.463532 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.463646 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.463663 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.463839 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.463880 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.567104 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.567171 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.567188 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.567215 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.567233 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.671351 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.671448 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.671473 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.671955 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.672529 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.718900 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.718952 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.718970 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.718993 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.719007 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: E1201 06:51:47.738578 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.744201 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.744286 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.744311 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.744339 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.744358 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: E1201 06:51:47.761002 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.766070 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.766132 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
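The NodeNotReady condition recorded above always carries the same cause: no CNI configuration file in /etc/kubernetes/cni/net.d/. Below is a minimal Go sketch of the directory check that message implies; it is an illustration, not kubelet or CRI-O source. Only the directory path is taken from the log, and the accepted extensions (.conf, .conflist, .json) are an assumption based on common CNI config loading.

// Minimal sketch, not kubelet source: report whether any CNI network
// configuration exists in the directory named by the log message.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path taken from the log message
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", dir, err)
		return
	}
	found := false
	for _, e := range entries {
		// Assumption: the usual extensions a CNI config loader accepts.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			found = true
			fmt.Println("found CNI config:", filepath.Join(dir, e.Name()))
		}
	}
	if !found {
		fmt.Printf("no CNI configuration file in %s/. Has your network provider started?\n", dir)
	}
}

On this node the directory is evidently empty, which keeps NetworkReady=false and the Ready condition pinned at KubeletNotReady until the network provider writes its configuration.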
event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.766151 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.766174 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.766192 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: E1201 06:51:47.784224 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.788353 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.788400 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
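The status patch failures bottom out in certificate validation: the webhook serving certificate expired on 2025-08-24T17:21:41Z, while the current time is 2025-12-01T06:51:47Z. A minimal sketch of the NotBefore/NotAfter comparison behind that x509 error text follows; the PEM file path is hypothetical, not taken from this log.

// Minimal sketch, assuming a hypothetical PEM certificate file on disk:
// the validity-window comparison that makes TLS verification report
// "certificate has expired or is not yet valid".
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("/tmp/webhook-cert.pem") // hypothetical path
	if err != nil {
		fmt.Println("read:", err)
		return
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		fmt.Println("no PEM block found")
		return
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Println("parse:", err)
		return
	}
	now := time.Now()
	switch {
	case now.After(cert.NotAfter):
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate is not yet valid: current time %s is before %s\n",
			now.UTC().Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	default:
		fmt.Printf("certificate is valid until %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
	}
}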
event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.788411 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.788431 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.788443 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: E1201 06:51:47.801933 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.813193 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.813281 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
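Since the failing call is a Post to https://127.0.0.1:9743/node, the same failure can be reproduced on the node itself without going through the API server. A diagnostic sketch follows, assuming only the address from the log: a verified handshake should fail exactly as in the errors above, after which skipping verification (for inspection only) exposes the served certificate's validity window.

// Diagnostic sketch: dial the webhook endpoint from the log and surface
// the same x509 verification error directly.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	const addr = "127.0.0.1:9743" // webhook endpoint taken from the log

	// A verified handshake fails the same way the API server's call does
	// when the serving certificate has expired.
	if conn, err := tls.Dial("tcp", addr, &tls.Config{}); err != nil {
		fmt.Println("verified handshake failed:", err)
	} else {
		conn.Close()
		fmt.Println("verified handshake succeeded")
	}

	// Skip verification only to read the certificate's validity window.
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s\n",
			cert.Subject, cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	}
}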
event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.813310 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.813339 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.813356 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: E1201 06:51:47.835487 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:47 crc kubenswrapper[4822]: E1201 06:51:47.835701 4822 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.837491 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.837543 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.837576 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.837600 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.837615 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.941164 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.941223 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.941236 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.941254 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.941267 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:47Z","lastTransitionTime":"2025-12-01T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.950784 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:47 crc kubenswrapper[4822]: I1201 06:51:47.950787 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:47 crc kubenswrapper[4822]: E1201 06:51:47.950923 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:47 crc kubenswrapper[4822]: E1201 06:51:47.951087 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.044731 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.044810 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.044834 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.044864 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.044887 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:48Z","lastTransitionTime":"2025-12-01T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.147867 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.147931 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.147951 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.147977 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.147999 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:48Z","lastTransitionTime":"2025-12-01T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.251210 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.251277 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.251302 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.251331 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.251356 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:48Z","lastTransitionTime":"2025-12-01T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.355630 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.355763 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.355790 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.355820 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.355841 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:48Z","lastTransitionTime":"2025-12-01T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.459047 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.459116 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.459134 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.459158 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.459175 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:48Z","lastTransitionTime":"2025-12-01T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.561810 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.561877 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.561900 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.561930 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.561951 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:48Z","lastTransitionTime":"2025-12-01T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.665196 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.665246 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.665263 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.665286 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.665302 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:48Z","lastTransitionTime":"2025-12-01T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.768210 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.768253 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.768266 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.768284 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.768296 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:48Z","lastTransitionTime":"2025-12-01T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.870272 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.870324 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.870336 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.870354 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.870364 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:48Z","lastTransitionTime":"2025-12-01T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.950139 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:48 crc kubenswrapper[4822]: E1201 06:51:48.950318 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.950466 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:48 crc kubenswrapper[4822]: E1201 06:51:48.950729 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
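The "Node became not ready" lines from setters.go:603 re-stamp the same Ready condition on every status-update pass, which is why the five-line block repeats roughly every 100ms. Below is a minimal sketch of that condition as a typed object, using the upstream k8s.io/api types with the field values copied verbatim from the log; building the struct by hand like this is illustrative and is not the kubelet's setter code.

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	// The condition the kubelet keeps recording while the CNI config is absent.
	cond := corev1.NodeCondition{
		Type:               corev1.NodeReady,
		Status:             corev1.ConditionFalse,
		LastHeartbeatTime:  metav1.Now(),
		LastTransitionTime: metav1.Now(),
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
			"Has your network provider started?",
	}
	fmt.Printf("%+v\n", cond)
}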
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.973328 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.973385 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.973447 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.973476 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:48 crc kubenswrapper[4822]: I1201 06:51:48.973546 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:48Z","lastTransitionTime":"2025-12-01T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.075979 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.076022 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.076040 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.076142 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.076159 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:49Z","lastTransitionTime":"2025-12-01T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.178891 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.178963 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.178985 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.179011 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.179029 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:49Z","lastTransitionTime":"2025-12-01T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.281808 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.281860 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.281878 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.281899 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.281913 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:49Z","lastTransitionTime":"2025-12-01T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.385386 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.385452 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.385466 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.385684 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.385715 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:49Z","lastTransitionTime":"2025-12-01T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.488695 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.488746 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.488760 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.488781 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.488794 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:49Z","lastTransitionTime":"2025-12-01T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.592652 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.592731 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.592755 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.592790 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.592816 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:49Z","lastTransitionTime":"2025-12-01T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.695107 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.695152 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.695169 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.695194 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.695212 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:49Z","lastTransitionTime":"2025-12-01T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.797410 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.797583 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.797618 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.797645 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.797680 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:49Z","lastTransitionTime":"2025-12-01T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.900422 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.900465 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.900476 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.900495 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.900507 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:49Z","lastTransitionTime":"2025-12-01T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.950743 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:49 crc kubenswrapper[4822]: I1201 06:51:49.950809 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:49 crc kubenswrapper[4822]: E1201 06:51:49.950936 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
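The recurring "no CNI configuration file in /etc/kubernetes/cni/net.d/" message reduces to an empty network-config directory: the runtime reports NetworkReady=false until the network provider writes a config file there, and the node's Ready condition tracks that. A small sketch of the directory scan follows, assuming the conventional libcni extensions (.conf, .conflist, .json); the exact cri-o/libcni code path may differ.

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // the directory named in the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Println("cannot read confdir:", err)
		return
	}
	var found []string
	for _, e := range entries {
		ext := strings.ToLower(filepath.Ext(e.Name()))
		if ext == ".conf" || ext == ".conflist" || ext == ".json" {
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		// This is the state the kubelet keeps reporting: the network plugin
		// has not written its config yet, so the node stays NotReady.
		fmt.Println("no CNI configuration file in", confDir)
		return
	}
	fmt.Println("CNI configs:", found)
}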
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:49 crc kubenswrapper[4822]: E1201 06:51:49.951043 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.003703 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.003775 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.003804 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.003836 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.003858 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:50Z","lastTransitionTime":"2025-12-01T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.107438 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.107546 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.107589 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.107646 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.107667 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:50Z","lastTransitionTime":"2025-12-01T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.213900 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.214016 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.214038 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.214071 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.214125 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:50Z","lastTransitionTime":"2025-12-01T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.317606 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.317638 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.317647 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.317661 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.317671 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:50Z","lastTransitionTime":"2025-12-01T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.419673 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.419699 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.419721 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.419735 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.419743 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:50Z","lastTransitionTime":"2025-12-01T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.522516 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.522586 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.522600 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.522617 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.522628 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:50Z","lastTransitionTime":"2025-12-01T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.625061 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.625139 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.625162 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.625192 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.625215 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:50Z","lastTransitionTime":"2025-12-01T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.727830 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.727875 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.727887 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.727902 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.727915 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:50Z","lastTransitionTime":"2025-12-01T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.830191 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.830255 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.830276 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.830301 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.830320 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:50Z","lastTransitionTime":"2025-12-01T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.933424 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.933482 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.933498 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.933519 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.933531 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:50Z","lastTransitionTime":"2025-12-01T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.950844 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:50 crc kubenswrapper[4822]: I1201 06:51:50.950889 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:50 crc kubenswrapper[4822]: E1201 06:51:50.951008 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
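Each "No sandbox for pod can be found. Need to start a new one" line is paired with an "Error syncing pod, skipping" error: pods that would need a fresh sandbox are held back while the runtime network is down, which is why the same few networked pods keep cycling every two seconds while everything else stays quiet. The sketch below is a deliberately simplified illustration of that gate; the types and control flow are assumptions, not the actual pod_workers.go implementation.

package main

import "fmt"

// runtimeStatus mirrors the two fields of the runtime's network status
// that matter for this gate.
type runtimeStatus struct {
	NetworkReady bool
	Reason       string
}

// syncPod refuses to proceed when the pod needs a new sandbox and the
// runtime network is not ready, mirroring the error text in the log.
func syncPod(hasSandbox bool, rs runtimeStatus) error {
	if !hasSandbox && !rs.NetworkReady {
		return fmt.Errorf("network is not ready: container runtime network not ready: NetworkReady=false reason:%s", rs.Reason)
	}
	// ... otherwise: create the sandbox and start the containers.
	return nil
}

func main() {
	rs := runtimeStatus{NetworkReady: false, Reason: "NetworkPluginNotReady"}
	if err := syncPod(false, rs); err != nil {
		fmt.Printf("Error syncing pod, skipping err=%q pod=%q\n",
			err, "openshift-multus/network-metrics-daemon-lk8mq")
	}
}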
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:50 crc kubenswrapper[4822]: E1201 06:51:50.951176 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.037076 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.037140 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.037153 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.037172 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.037188 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:51Z","lastTransitionTime":"2025-12-01T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.140738 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.140791 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.140802 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.140823 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.140836 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:51Z","lastTransitionTime":"2025-12-01T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.243799 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.243868 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.243887 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.243916 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.243941 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:51Z","lastTransitionTime":"2025-12-01T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.346615 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.346660 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.346670 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.346685 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.346695 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:51Z","lastTransitionTime":"2025-12-01T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.449854 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.449940 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.449960 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.449989 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.450009 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:51Z","lastTransitionTime":"2025-12-01T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.554208 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.554282 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.554295 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.554321 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.554340 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:51Z","lastTransitionTime":"2025-12-01T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.567398 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:51:51 crc kubenswrapper[4822]: E1201 06:51:51.567703 4822 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 01 06:51:51 crc kubenswrapper[4822]: E1201 06:51:51.567803 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs podName:80225810-9d72-45b0-980f-1cb242d987e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:52:23.567775961 +0000 UTC m=+98.888583687 (durationBeforeRetry 32s).
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs") pod "network-metrics-daemon-lk8mq" (UID: "80225810-9d72-45b0-980f-1cb242d987e8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.658309 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.658395 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.658415 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.658445 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.658466 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:51Z","lastTransitionTime":"2025-12-01T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.762612 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.762662 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.762681 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.762703 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.762719 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:51Z","lastTransitionTime":"2025-12-01T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.866361 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.866434 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.866453 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.866485 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.866509 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:51Z","lastTransitionTime":"2025-12-01T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.950450 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.950573 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:51 crc kubenswrapper[4822]: E1201 06:51:51.950621 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:51 crc kubenswrapper[4822]: E1201 06:51:51.950867 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.969328 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.969382 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.969393 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.969415 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:51 crc kubenswrapper[4822]: I1201 06:51:51.969429 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:51Z","lastTransitionTime":"2025-12-01T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.072639 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.072687 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.072696 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.072714 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.072727 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:52Z","lastTransitionTime":"2025-12-01T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.175330 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.175372 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.175387 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.175401 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.175411 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:52Z","lastTransitionTime":"2025-12-01T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.278565 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.278609 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.278625 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.278642 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.278651 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:52Z","lastTransitionTime":"2025-12-01T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.380399 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-b6tpr_34b58185-4742-4187-9243-860433c413d8/kube-multus/0.log" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.380494 4822 generic.go:334] "Generic (PLEG): container finished" podID="34b58185-4742-4187-9243-860433c413d8" containerID="671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b" exitCode=1 Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.380542 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-b6tpr" event={"ID":"34b58185-4742-4187-9243-860433c413d8","Type":"ContainerDied","Data":"671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b"} Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.381165 4822 scope.go:117] "RemoveContainer" containerID="671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.381201 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.381269 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.381302 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.381334 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.381359 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:52Z","lastTransitionTime":"2025-12-01T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.406890 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.423241 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.438198 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.455446 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.477878 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:32Z\\\",\\\"message\\\":\\\"/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 06:51:32.005099 6449 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:32.005113 6449 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:32.005134 6449 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:32.005158 6449 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:32.005174 6449 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:32.005166 6449 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:32.005198 6449 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:32.005212 6449 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:32.005211 6449 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:32.005180 6449 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:32.005245 6449 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:32.005277 6449 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:32.005355 6449 factory.go:656] Stopping watch factory\\\\nI1201 06:51:32.005385 6449 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:51:32.005407 6449 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.484473 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.484522 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.484534 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.484572 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.484591 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:52Z","lastTransitionTime":"2025-12-01T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.492964 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.507995 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.519650 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.532883 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.545471 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.558591 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:51Z\\\",\\\"message\\\":\\\"2025-12-01T06:51:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ee2addac-e384-44f0-aad5-8122e5b0f233\\\\n2025-12-01T06:51:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ee2addac-e384-44f0-aad5-8122e5b0f233 to /host/opt/cni/bin/\\\\n2025-12-01T06:51:06Z [verbose] multus-daemon started\\\\n2025-12-01T06:51:06Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:51:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.572858 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 
06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.587646 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.587715 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.587736 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.587762 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.587592 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshi
ft-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.587787 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:52Z","lastTransitionTime":"2025-12-01T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.598979 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.612190 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c05c16e-eb24-4a18-b222-f4a26288f012\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://260a537446b704d0fdb98d6a6a707ffdfb8e067c0f8908efd517a182f4536fb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df4d34c9143b9197957269141bafa327acfff4c505d0a844387d36cda5812d91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9449923af3dd5340320e4d63aa4f7a4c1398411cb97f20b117bd5d90bef5f2f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.629991 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.641020 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.690384 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.690444 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.690461 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.690487 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.690505 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:52Z","lastTransitionTime":"2025-12-01T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.793822 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.793865 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.793874 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.793889 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.793900 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:52Z","lastTransitionTime":"2025-12-01T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.896450 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.896496 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.896514 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.896539 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.896577 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:52Z","lastTransitionTime":"2025-12-01T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.949954 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.950031 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:52 crc kubenswrapper[4822]: E1201 06:51:52.950101 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:52 crc kubenswrapper[4822]: E1201 06:51:52.950199 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.999173 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.999219 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.999232 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.999247 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:52 crc kubenswrapper[4822]: I1201 06:51:52.999260 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:52Z","lastTransitionTime":"2025-12-01T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.101875 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.101910 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.101918 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.101933 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.101944 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:53Z","lastTransitionTime":"2025-12-01T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.205199 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.205253 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.205265 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.205321 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.205334 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:53Z","lastTransitionTime":"2025-12-01T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.307819 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.307868 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.307880 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.307896 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.307909 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:53Z","lastTransitionTime":"2025-12-01T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.385126 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-b6tpr_34b58185-4742-4187-9243-860433c413d8/kube-multus/0.log" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.385221 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-b6tpr" event={"ID":"34b58185-4742-4187-9243-860433c413d8","Type":"ContainerStarted","Data":"286b5eec5380bce867f5f628bb82ce645ccb06c9b5efd892b1530a323d2f97e6"} Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.403572 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a0
2094119e80cbf2767a45c193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:32Z\\\",\\\"message\\\":\\\"/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 06:51:32.005099 6449 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:32.005113 6449 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:32.005134 6449 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:32.005158 6449 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:32.005174 6449 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:32.005166 6449 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:32.005198 6449 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:32.005212 6449 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:32.005211 6449 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:32.005180 6449 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:32.005245 6449 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:32.005277 6449 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:32.005355 6449 factory.go:656] Stopping watch factory\\\\nI1201 06:51:32.005385 6449 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:51:32.005407 6449 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.410395 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.410429 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.410439 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.410457 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.410469 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:53Z","lastTransitionTime":"2025-12-01T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.419179 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.432430 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.442835 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.462939 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountP
ath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.474164 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.488386 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://286b5eec5380bce867f5f628bb82ce645ccb06c9b5efd892b1530a323d2f97e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:51Z\\\",\\\"message\\\":\\\"2025-12-01T06:51:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ee2addac-e384-44f0-aad5-8122e5b0f233\\\\n2025-12-01T06:51:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ee2addac-e384-44f0-aad5-8122e5b0f233 to /host/opt/cni/bin/\\\\n2025-12-01T06:51:06Z [verbose] multus-daemon started\\\\n2025-12-01T06:51:06Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:51:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.504032 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 
06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.515594 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.515645 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.515663 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.515687 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.515704 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:53Z","lastTransitionTime":"2025-12-01T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.520105 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 
genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.535469 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.549115 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c05c16e-eb24-4a18-b222-f4a26288f012\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://260a537446b704d0fdb98d6a6a707ffdfb8e067c0f8908efd517a182f4536fb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df4d34c9143b9197957269141bafa327acfff4c505d0a844387d36cda5812d91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9449923af3dd5340320e4d63aa4f7a4c1398411cb97f20b117bd5d90bef5f2f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.566657 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.581016 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.595146 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.609386 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.618942 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.618971 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.618979 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.618994 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.619003 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:53Z","lastTransitionTime":"2025-12-01T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.620099 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.630074 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.720970 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.721038 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.721050 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.721065 4822 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.721076 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:53Z","lastTransitionTime":"2025-12-01T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.824065 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.824154 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.824172 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.824195 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.824211 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:53Z","lastTransitionTime":"2025-12-01T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.926535 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.926602 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.926613 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.926632 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.926644 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:53Z","lastTransitionTime":"2025-12-01T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.950461 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:53 crc kubenswrapper[4822]: I1201 06:51:53.950513 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:53 crc kubenswrapper[4822]: E1201 06:51:53.950650 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:53 crc kubenswrapper[4822]: E1201 06:51:53.950907 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.029748 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.029815 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.029825 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.029856 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.029874 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:54Z","lastTransitionTime":"2025-12-01T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.132814 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.132866 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.132881 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.132903 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.132917 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:54Z","lastTransitionTime":"2025-12-01T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.236345 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.236405 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.236418 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.236441 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.236454 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:54Z","lastTransitionTime":"2025-12-01T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.339162 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.339205 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.339214 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.339235 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.339246 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:54Z","lastTransitionTime":"2025-12-01T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.441854 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.441911 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.441929 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.441955 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.441974 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:54Z","lastTransitionTime":"2025-12-01T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.544814 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.544895 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.544919 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.544953 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.544976 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:54Z","lastTransitionTime":"2025-12-01T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.648126 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.648186 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.648206 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.648227 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.648243 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:54Z","lastTransitionTime":"2025-12-01T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.751896 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.752352 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.752441 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.752520 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.752608 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:54Z","lastTransitionTime":"2025-12-01T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.856832 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.857468 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.857623 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.857718 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.857826 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:54Z","lastTransitionTime":"2025-12-01T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.950515 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.950672 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:54 crc kubenswrapper[4822]: E1201 06:51:54.951606 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:54 crc kubenswrapper[4822]: E1201 06:51:54.951858 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.961680 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.962088 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.962173 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.962323 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.962406 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:54Z","lastTransitionTime":"2025-12-01T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.968539 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",
\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.985328 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\
",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:54 crc kubenswrapper[4822]: I1201 06:51:54.998721 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.014970 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.032924 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.046387 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.064895 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountP
ath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.065638 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.065713 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.065730 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.065759 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.065778 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:55Z","lastTransitionTime":"2025-12-01T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.094467 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a0
2094119e80cbf2767a45c193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:32Z\\\",\\\"message\\\":\\\"/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 06:51:32.005099 6449 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:32.005113 6449 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:32.005134 6449 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:32.005158 6449 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:32.005174 6449 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:32.005166 6449 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:32.005198 6449 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:32.005212 6449 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:32.005211 6449 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:32.005180 6449 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:32.005245 6449 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:32.005277 6449 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:32.005355 6449 factory.go:656] Stopping watch factory\\\\nI1201 06:51:32.005385 6449 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:51:32.005407 6449 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.110721 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.127925 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c05c16e-eb24-4a18-b222-f4a26288f012\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://260a537446b704d0fdb98d6a6a707ffdfb8e067c0f8908efd517a182f4536fb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df4d34c9143b9197957269141bafa327acfff4c505d0a844387d36cda5812d91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9449923af3dd5340320e4d63aa4f7a4c1398411cb97f20b117bd5d90bef5f2f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.144975 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.163348 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.169445 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.169498 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.169516 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.169543 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.169588 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:55Z","lastTransitionTime":"2025-12-01T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.183778 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.204879 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://286b5eec5380bce867f5f628bb82ce645ccb06c9b5efd892b1530a323d2f97e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:51Z\\\",\\\"message\\\":\\\"2025-12-01T06:51:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ee2addac-e384-44f0-aad5-8122e5b0f233\\\\n2025-12-01T06:51:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ee2addac-e384-44f0-aad5-8122e5b0f233 to /host/opt/cni/bin/\\\\n2025-12-01T06:51:06Z [verbose] multus-daemon started\\\\n2025-12-01T06:51:06Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:51:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.224577 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 
06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.245458 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.260898 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.272118 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.272150 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.272159 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.272175 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.272194 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:55Z","lastTransitionTime":"2025-12-01T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.375265 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.375291 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.375301 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.375312 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.375320 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:55Z","lastTransitionTime":"2025-12-01T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.477844 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.477921 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.477950 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.477984 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.478007 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:55Z","lastTransitionTime":"2025-12-01T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.581249 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.581381 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.581401 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.581435 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.581455 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:55Z","lastTransitionTime":"2025-12-01T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.684629 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.684705 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.684727 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.684770 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.684793 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:55Z","lastTransitionTime":"2025-12-01T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.787837 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.787922 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.787942 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.787976 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.787995 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:55Z","lastTransitionTime":"2025-12-01T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.891181 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.891226 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.891255 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.891273 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.891401 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:55Z","lastTransitionTime":"2025-12-01T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.950308 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:55 crc kubenswrapper[4822]: I1201 06:51:55.950415 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:55 crc kubenswrapper[4822]: E1201 06:51:55.951301 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:55 crc kubenswrapper[4822]: E1201 06:51:55.951423 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.030125 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.030162 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.030175 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.030191 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.030202 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:56Z","lastTransitionTime":"2025-12-01T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.133179 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.133226 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.133236 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.133254 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.133266 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:56Z","lastTransitionTime":"2025-12-01T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.236086 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.236149 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.236165 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.236189 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.236204 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:56Z","lastTransitionTime":"2025-12-01T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.339660 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.339724 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.339742 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.339773 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.339785 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:56Z","lastTransitionTime":"2025-12-01T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.442621 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.442915 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.442984 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.443057 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.443146 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:56Z","lastTransitionTime":"2025-12-01T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.546448 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.546891 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.546965 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.547035 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.547102 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:56Z","lastTransitionTime":"2025-12-01T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.649872 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.650128 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.650218 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.650304 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.650390 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:56Z","lastTransitionTime":"2025-12-01T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.753494 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.753882 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.754058 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.754196 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.754331 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:56Z","lastTransitionTime":"2025-12-01T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.857646 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.857681 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.857691 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.857708 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.857719 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:56Z","lastTransitionTime":"2025-12-01T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.950905 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.950917 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:56 crc kubenswrapper[4822]: E1201 06:51:56.951159 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:56 crc kubenswrapper[4822]: E1201 06:51:56.951262 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.960402 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.960459 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.960472 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.960493 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.960509 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:56Z","lastTransitionTime":"2025-12-01T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:56 crc kubenswrapper[4822]: I1201 06:51:56.963850 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.063639 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.064159 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.064362 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.064634 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.064793 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:57Z","lastTransitionTime":"2025-12-01T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.168233 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.168296 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.168313 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.168338 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.168355 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:57Z","lastTransitionTime":"2025-12-01T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.271473 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.271534 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.271584 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.271621 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.271640 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:57Z","lastTransitionTime":"2025-12-01T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.374633 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.374813 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.374837 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.374863 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.374882 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:57Z","lastTransitionTime":"2025-12-01T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.478036 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.478092 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.478109 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.478135 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.478153 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:57Z","lastTransitionTime":"2025-12-01T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.581651 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.581694 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.581703 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.581721 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.581734 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:57Z","lastTransitionTime":"2025-12-01T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.685372 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.685699 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.685761 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.685830 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.685888 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:57Z","lastTransitionTime":"2025-12-01T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.789824 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.789906 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.789920 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.789950 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.789969 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:57Z","lastTransitionTime":"2025-12-01T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.893058 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.893118 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.893129 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.893148 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.893162 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:57Z","lastTransitionTime":"2025-12-01T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.949874 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:57 crc kubenswrapper[4822]: E1201 06:51:57.950051 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.950229 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:57 crc kubenswrapper[4822]: E1201 06:51:57.950417 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.996948 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.997041 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.997061 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.997097 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:57 crc kubenswrapper[4822]: I1201 06:51:57.997134 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:57Z","lastTransitionTime":"2025-12-01T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.100483 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.100585 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.100603 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.100629 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.100648 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.204009 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.204073 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.204092 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.204118 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.204135 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.215236 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.215288 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.215306 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.215328 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.215344 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: E1201 06:51:58.230300 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.234972 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.235038 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.235051 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.235071 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.235088 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: E1201 06:51:58.248035 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.254019 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.254065 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.254082 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.254104 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.254122 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: E1201 06:51:58.281723 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.287228 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.287294 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.287308 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.287330 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.287345 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: E1201 06:51:58.304975 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.310013 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.310044 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.310057 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.310076 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.310087 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: E1201 06:51:58.322585 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:51:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:51:58 crc kubenswrapper[4822]: E1201 06:51:58.322720 4822 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.324617 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.324650 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.324661 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.324683 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.324696 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.427311 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.427375 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.427392 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.427419 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.427439 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.531023 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.531089 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.531106 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.531133 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.531154 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.635228 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.635299 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.635320 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.635350 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.635376 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.738478 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.738522 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.738538 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.738585 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.738603 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.841544 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.841610 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.841622 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.841644 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.841658 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.944485 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.944543 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.944599 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.944625 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.944642 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:58Z","lastTransitionTime":"2025-12-01T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.950038 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:51:58 crc kubenswrapper[4822]: E1201 06:51:58.950515 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:51:58 crc kubenswrapper[4822]: I1201 06:51:58.950236 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:51:58 crc kubenswrapper[4822]: E1201 06:51:58.950931 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.048515 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.048611 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.048630 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.048656 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.048679 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:59Z","lastTransitionTime":"2025-12-01T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.153425 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.153493 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.153511 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.153535 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.153577 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:59Z","lastTransitionTime":"2025-12-01T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.256746 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.256812 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.256834 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.256858 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.256880 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:59Z","lastTransitionTime":"2025-12-01T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.360198 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.360274 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.360293 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.361183 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.361242 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:59Z","lastTransitionTime":"2025-12-01T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.464356 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.464454 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.464477 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.464510 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.464530 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:59Z","lastTransitionTime":"2025-12-01T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.567275 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.567336 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.567361 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.567391 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.567414 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:59Z","lastTransitionTime":"2025-12-01T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.669968 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.670026 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.670043 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.670067 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.670085 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:59Z","lastTransitionTime":"2025-12-01T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.773956 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.774021 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.774040 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.774067 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.774102 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:59Z","lastTransitionTime":"2025-12-01T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.877502 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.878061 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.878206 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.878658 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.878804 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:59Z","lastTransitionTime":"2025-12-01T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.949738 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:51:59 crc kubenswrapper[4822]: E1201 06:51:59.949976 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.950227 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:51:59 crc kubenswrapper[4822]: E1201 06:51:59.950326 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.951958 4822 scope.go:117] "RemoveContainer" containerID="2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.982397 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.982446 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.982463 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.982486 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:51:59 crc kubenswrapper[4822]: I1201 06:51:59.982505 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:51:59Z","lastTransitionTime":"2025-12-01T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.085841 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.085936 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.085953 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.085977 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.085994 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:00Z","lastTransitionTime":"2025-12-01T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.194611 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.195377 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.195580 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.195624 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.195647 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:00Z","lastTransitionTime":"2025-12-01T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.299467 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.299610 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.299642 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.299675 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.299697 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:00Z","lastTransitionTime":"2025-12-01T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.402734 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.402802 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.402825 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.402853 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.402876 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:00Z","lastTransitionTime":"2025-12-01T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.418874 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/2.log" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.422153 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"} Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.422740 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.446833 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.466522 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.481278 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.493021 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"20fbf721-60a7-48aa-8b68-20b6daa76e55\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9cbbbce6b94ff404ff8fff6133b99caf125a800cd26bda660ca6fd977008b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90b993a164e48229e710d3ddb0ffb4f905efd13ce48fa9b61907fe1483d7b69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90b993a164e48229e710d3ddb0ffb4f905efd13ce48fa9b61907fe1483d7b69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.505656 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.505687 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.505697 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.505712 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.505725 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:00Z","lastTransitionTime":"2025-12-01T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.505727 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.518777 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.529105 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.544446 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0
d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.589875 4822 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:17
4f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ov
n-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:32Z\\\",\\\"message\\\":\\\"/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 06:51:32.005099 6449 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:32.005113 6449 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:32.005134 6449 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:32.005158 6449 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:32.005174 6449 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:32.005166 6449 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:32.005198 6449 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:32.005212 6449 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:32.005211 6449 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:32.005180 6449 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:32.005245 6449 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:32.005277 6449 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:32.005355 6449 factory.go:656] Stopping watch factory\\\\nI1201 06:51:32.005385 6449 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:51:32.005407 6449 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.607303 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.608495 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.608532 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.608544 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.608573 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.608584 4822 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:00Z","lastTransitionTime":"2025-12-01T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.636326 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.649424 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c05c16e-eb24-4a18-b222-f4a26288f012\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://260a537446b704d0fdb98d6a6a707ffdfb8e067c0f8908efd517a182f4536fb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df4d34c9143b9197957269141bafa327acfff4c505d0a844387d36cda5812d91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9449923af3dd5340320e4d63aa4f7a4c1398411cb97f20b117bd5d90bef5f2f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.664399 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.677829 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.691648 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.712292 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.712349 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.712361 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.712382 
4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.712396 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:00Z","lastTransitionTime":"2025-12-01T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.714061 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://286b5eec5380bce867f5f628bb82ce645ccb06c9b5efd892b1530a323d2f97e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:51Z\\\",\\\"message\\\":\\\"2025-12-01T06:51:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ee2addac-e384-44f0-aad5-8122e5b0f233\\\\n2025-12-01T06:51:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ee2addac-e384-44f0-aad5-8122e5b0f233 to /host/opt/cni/bin/\\\\n2025-12-01T06:51:06Z [verbose] multus-daemon started\\\\n2025-12-01T06:51:06Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:51:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.730978 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 
06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.744749 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.815876 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.815951 4822 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.815973 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.816002 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.816018 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:00Z","lastTransitionTime":"2025-12-01T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.919533 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.919640 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.919651 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.919670 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.919681 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:00Z","lastTransitionTime":"2025-12-01T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.950475 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:00 crc kubenswrapper[4822]: I1201 06:52:00.950508 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:00 crc kubenswrapper[4822]: E1201 06:52:00.950807 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:00 crc kubenswrapper[4822]: E1201 06:52:00.950980 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.022625 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.022678 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.022695 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.022717 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.022734 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:01Z","lastTransitionTime":"2025-12-01T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.127669 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.127734 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.127760 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.127788 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.127811 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:01Z","lastTransitionTime":"2025-12-01T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.230709 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.230755 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.230770 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.230791 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.230805 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:01Z","lastTransitionTime":"2025-12-01T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.332958 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.333234 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.333373 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.333482 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.333629 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:01Z","lastTransitionTime":"2025-12-01T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.435886 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.436245 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.436437 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.436661 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.436853 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:01Z","lastTransitionTime":"2025-12-01T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.539385 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.539452 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.539475 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.539510 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.539532 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:01Z","lastTransitionTime":"2025-12-01T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.643163 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.643239 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.643258 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.643283 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.643301 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:01Z","lastTransitionTime":"2025-12-01T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.747119 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.747186 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.747204 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.747227 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.747245 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:01Z","lastTransitionTime":"2025-12-01T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.850650 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.850692 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.850703 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.850719 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.850729 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:01Z","lastTransitionTime":"2025-12-01T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.950709 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.950816 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:01 crc kubenswrapper[4822]: E1201 06:52:01.950964 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:01 crc kubenswrapper[4822]: E1201 06:52:01.951154 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.953476 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.953519 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.953532 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.953578 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:01 crc kubenswrapper[4822]: I1201 06:52:01.953595 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:01Z","lastTransitionTime":"2025-12-01T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.056669 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.056759 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.056836 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.056872 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.056898 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:02Z","lastTransitionTime":"2025-12-01T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.159965 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.160070 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.160094 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.160126 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.160148 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:02Z","lastTransitionTime":"2025-12-01T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.263542 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.263692 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.263714 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.263745 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.263764 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:02Z","lastTransitionTime":"2025-12-01T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.367078 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.367132 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.367143 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.367163 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.367179 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:02Z","lastTransitionTime":"2025-12-01T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.431708 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/3.log" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.432783 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/2.log" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.438005 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143" exitCode=1 Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.438081 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"} Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.438150 4822 scope.go:117] "RemoveContainer" containerID="2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.439633 4822 scope.go:117] "RemoveContainer" containerID="3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143" Dec 01 06:52:02 crc kubenswrapper[4822]: E1201 06:52:02.439973 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\"" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.460581 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 
06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.473903 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.473958 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.473980 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.474006 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.474024 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:02Z","lastTransitionTime":"2025-12-01T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.489966 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 
genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.511392 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.530701 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c05c16e-eb24-4a18-b222-f4a26288f012\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://260a537446b704d0fdb98d6a6a707ffdfb8e067c0f8908efd517a182f4536fb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df4d34c9143b9197957269141bafa327acfff4c505d0a844387d36cda5812d91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9449923af3dd5340320e4d63aa4f7a4c1398411cb97f20b117bd5d90bef5f2f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.554474 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.578303 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.578365 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.578382 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.578406 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.578425 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:02Z","lastTransitionTime":"2025-12-01T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.578710 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.598718 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.623200 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://286b5eec5380bce867f5f628bb82ce645ccb06c9b5efd892b1530a323d2f97e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:51Z\\\",\\\"message\\\":\\\"2025-12-01T06:51:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ee2addac-e384-44f0-aad5-8122e5b0f233\\\\n2025-12-01T06:51:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ee2addac-e384-44f0-aad5-8122e5b0f233 to /host/opt/cni/bin/\\\\n2025-12-01T06:51:06Z [verbose] multus-daemon started\\\\n2025-12-01T06:51:06Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:51:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.643215 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.665658 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.681440 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.681510 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.681532 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.681592 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.681614 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:02Z","lastTransitionTime":"2025-12-01T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.685414 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.702340 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.719076 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"20fbf721-60a7-48aa-8b68-20b6daa76e55\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9cbbbce6b94ff404ff8fff6133b99caf125a800cd26bda660ca6fd977008b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90b993a164e48229e710d3ddb0ffb4f905efd13ce48fa9b61907fe1483d7b69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90b993a164e48229e710d3ddb0ffb4f905efd13ce48fa9b61907fe1483d7b69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.740286 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.761696 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.776858 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.784539 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.784604 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.784618 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.784641 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.784660 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:02Z","lastTransitionTime":"2025-12-01T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.802807 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.832002 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:32Z\\\",\\\"message\\\":\\\"/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 06:51:32.005099 6449 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:32.005113 6449 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:32.005134 6449 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:32.005158 6449 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:32.005174 6449 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:32.005166 6449 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:32.005198 6449 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:32.005212 6449 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:32.005211 6449 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:32.005180 6449 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:32.005245 6449 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:32.005277 6449 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:32.005355 6449 factory.go:656] Stopping watch factory\\\\nI1201 06:51:32.005385 6449 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:51:32.005407 6449 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:52:01Z\\\",\\\"message\\\":\\\"e_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", 
ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.219\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1201 06:52:01.357980 6809 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\
\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.887239 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.887284 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.887297 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.887318 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.887336 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:02Z","lastTransitionTime":"2025-12-01T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.950888 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:02 crc kubenswrapper[4822]: E1201 06:52:02.951082 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.951442 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:02 crc kubenswrapper[4822]: E1201 06:52:02.951745 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.990402 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.990463 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.990484 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.990510 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:02 crc kubenswrapper[4822]: I1201 06:52:02.990531 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:02Z","lastTransitionTime":"2025-12-01T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.100513 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.100629 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.100655 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.100685 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.100709 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:03Z","lastTransitionTime":"2025-12-01T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.221191 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.221264 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.221281 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.221307 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.221325 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:03Z","lastTransitionTime":"2025-12-01T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.324523 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.324631 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.324653 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.324682 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.324705 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:03Z","lastTransitionTime":"2025-12-01T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.428291 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.428380 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.428407 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.428444 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.428469 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:03Z","lastTransitionTime":"2025-12-01T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.445470 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/3.log"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.531428 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.531470 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.531481 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.531500 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.531516 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:03Z","lastTransitionTime":"2025-12-01T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.634631 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.634667 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.634677 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.634692 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.634704 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:03Z","lastTransitionTime":"2025-12-01T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.737287 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.737334 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.737351 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.737372 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.737389 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:03Z","lastTransitionTime":"2025-12-01T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.840863 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.840941 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.840959 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.840992 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.841016 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:03Z","lastTransitionTime":"2025-12-01T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.945021 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.945452 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.945674 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.945878 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.946056 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:03Z","lastTransitionTime":"2025-12-01T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.950347 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:52:03 crc kubenswrapper[4822]: I1201 06:52:03.950496 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:52:03 crc kubenswrapper[4822]: E1201 06:52:03.950750 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:52:03 crc kubenswrapper[4822]: E1201 06:52:03.950936 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.049399 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.049776 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.049952 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.050120 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.050284 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:04Z","lastTransitionTime":"2025-12-01T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.154576 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.154634 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.154649 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.154674 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.154695 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:04Z","lastTransitionTime":"2025-12-01T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.258368 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.258846 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.258863 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.258885 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.258905 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:04Z","lastTransitionTime":"2025-12-01T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.362052 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.362441 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.362602 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.362823 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.363023 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:04Z","lastTransitionTime":"2025-12-01T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.465580 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.465629 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.465640 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.465659 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.465670 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:04Z","lastTransitionTime":"2025-12-01T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.568236 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.568298 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.568323 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.568357 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.568379 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:04Z","lastTransitionTime":"2025-12-01T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.671127 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.671180 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.671197 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.671221 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.671238 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:04Z","lastTransitionTime":"2025-12-01T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.774986 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.775685 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.775882 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.776028 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.776148 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:04Z","lastTransitionTime":"2025-12-01T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.879806 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.879870 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.879900 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.879925 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.879943 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:04Z","lastTransitionTime":"2025-12-01T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.950824 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:52:04 crc kubenswrapper[4822]: E1201 06:52:04.951039 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8"
Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.951354 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:52:04 crc kubenswrapper[4822]: E1201 06:52:04.951596 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.970595 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f02cc583-763a-4279-84fd-2d6b561fb11a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae4bf455ec0d480bd48e74fcaf4ad42931539fee7d410a7a8077d36509e65f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feb00aa88bbe061723e74aa38f7658f5e647701f5ccdc234ec1f3c5c2a2f5f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtgpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:18Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-cp777\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.982904 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.983155 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.983335 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.983541 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.983789 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:04Z","lastTransitionTime":"2025-12-01T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:04 crc kubenswrapper[4822]: I1201 06:52:04.993844 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1519f97-7402-44a5-8979-b605e3957d76\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"cont
ainerID\\\":\\\"cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"193623 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1201 06:51:03.193636 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1201 06:51:03.193543 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1643679467/tls.crt::/tmp/serving-cert-1643679467/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764571847\\\\\\\\\\\\\\\" (2025-12-01 06:50:46 +0000 UTC to 2025-12-31 06:50:47 +0000 UTC (now=2025-12-01 06:51:03.19347412 +0000 UTC))\\\\\\\"\\\\nI1201 06:51:03.193850 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764571857\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764571857\\\\\\\\\\\\\\\" (2025-12-01 05:50:57 +0000 UTC to 2026-12-01 05:50:57 +0000 UTC (now=2025-12-01 06:51:03.19382313 +0000 UTC))\\\\\\\"\\\\nI1201 
06:51:03.193852 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1201 06:51:03.193873 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1201 06:51:03.193886 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1201 06:51:03.193905 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1201 06:51:03.194094 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1201 06:51:03.194190 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1201 06:51:03.194210 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1201 06:51:03.194510 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.013088 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac5dbbc-b1b5-4883-91a1-33b2f37fbcdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://819dc8682ad96a5fdf5289e787e8eb78384c82bc5b9b1c22edce066a0dfd2e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6ad124208fd342e5fbbbf37060adfc8957b1f2d49ceec31dca9ebe8ca7de200\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24a233b15f77e45d93e5c2e3bc42ea1ccee014937d96e02c013d56c1fa737428\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.029461 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4c05c16e-eb24-4a18-b222-f4a26288f012\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://260a537446b704d0fdb98d6a6a707ffdfb8e067c0f8908efd517a182f4536fb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df4d34c9143b9197957269141bafa327acfff4c505d0a844387d36cda5812d91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9449923af3dd5340320e4d63aa4f7a4c1398411cb97f20b117bd5d90bef5f2f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d64116d515094566da376bbf3c338d9c3b3830f2be760c524c580bd1714446ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.050696 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.070317 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4233b19ca2415519a32f1721291ca17c90ccef5243c79bd364af0c94f0b8f008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdb08db6e6c5d0865cabccb5c8b38e99e4b04f790a8730467c9cc7fa89f1082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.084070 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://130fb51940e71a8ff11388d939f48b3aeaf7b8ed720028189b054dabf658944b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.086411 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.086443 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.086454 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.086475 
4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.086510 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:05Z","lastTransitionTime":"2025-12-01T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.099335 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-b6tpr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34b58185-4742-4187-9243-860433c413d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://286b5eec5380bce867f5f628bb82ce645ccb06c9b5efd892b1530a323d2f97e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:51Z\\\",\\\"message\\\":\\\"2025-12-01T06:51:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ee2addac-e384-44f0-aad5-8122e5b0f233\\\\n2025-12-01T06:51:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ee2addac-e384-44f0-aad5-8122e5b0f233 to /host/opt/cni/bin/\\\\n2025-12-01T06:51:06Z [verbose] multus-daemon started\\\\n2025-12-01T06:51:06Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:51:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jq4w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-b6tpr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.113807 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80225810-9d72-45b0-980f-1cb242d987e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jql47\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:19Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-lk8mq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.128871 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d993b9dac2a7d4d0bb709d4a15c5efa9ae3f16dd7d7c7702097d098189ffd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.140821 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6c6a838-3829-4058-aa59-1302d07e4507\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5caf99b506baf0be4cb3c4c14bbad89e650e576867492e4bffc26c685042cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x9m8d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2cz64\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.154913 4822 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-cv8h4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e28e62c9-6cb1-4d0f-a448-d7c249c20bb5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4bfb76e0786f2783a25629f31ad65365c31a6825ecfd1feae689eed19353db7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wgmqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:08Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cv8h4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.166323 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"20fbf721-60a7-48aa-8b68-20b6daa76e55\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9cbbbce6b94ff404ff8fff6133b99caf125a800cd26bda660ca6fd977008b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90b993a164e48229e710d3ddb0ffb4f905efd13ce48fa9b61907fe1483d7b69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90b993a164e48229e710d3ddb0ffb4f905efd13ce48fa9b61907fe1483d7b69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:50:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.181652 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.189421 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.189498 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.189538 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.189628 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.189652 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:05Z","lastTransitionTime":"2025-12-01T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.198166 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.211343 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-8v9xh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e244f28-ce62-4ba3-bc7f-292a39725ae6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dcc0d4cf57724d5614383ece02b68365c0813a924d91dca235e3aa13877f16e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r8rm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-8v9xh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.231353 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ebc3e4ad-c394-405c-ac35-c77290463348\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ab4518283965303349647e4689824910d8e8893a3fc08c3297e8412c374eda9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1e695b1e3c2c6006d6da424aedcc44ee3c64e002543edf431da990eb442e01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c40c66f42095d73d67462c094577d30449f49a5d99f9381ee0d57ca930355bec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc83734ac90f7d26364625812b0157966e2e75594022e505d64dfc38f11921b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://176517994f0592c1acde309a839ecc5bed6d9bcb90f896630bf78951caf882b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06d8b9b0aa675ef399f328fbb4e40e749c9ab07af0df3c3ab56e0a95b90d06c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc94f554fce1d13bcea337fd22c8394487114cb652b0c205b4ceb97a31625ef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5b96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kb9ml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.260299 4822 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8284d339-ff12-453a-be42-4540e44252ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:51:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2538ff76434a4be474003d80572e20321927f0a02094119e80cbf2767a45c193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:51:32Z\\\",\\\"message\\\":\\\"/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 06:51:32.005099 6449 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:51:32.005113 6449 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:51:32.005134 6449 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:51:32.005158 6449 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:51:32.005174 6449 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 06:51:32.005166 6449 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:51:32.005198 6449 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:51:32.005212 6449 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 06:51:32.005211 6449 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:51:32.005180 6449 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:51:32.005245 6449 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:51:32.005277 6449 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:51:32.005355 6449 factory.go:656] Stopping watch factory\\\\nI1201 06:51:32.005385 6449 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:51:32.005407 6449 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:31Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:52:01Z\\\",\\\"message\\\":\\\"e_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", 
ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.219\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1201 06:52:01.357980 6809 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:52:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\
\\"containerID\\\":\\\"cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:51:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:51:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vlfwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:51:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-75mdq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:05Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.291612 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.291678 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.291698 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.291725 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.291743 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:05Z","lastTransitionTime":"2025-12-01T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.395185 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.395283 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.395300 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.395322 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.395340 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:05Z","lastTransitionTime":"2025-12-01T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.497865 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.497930 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.497947 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.497970 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.497988 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:05Z","lastTransitionTime":"2025-12-01T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.601441 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.601501 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.601518 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.601543 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.601595 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:05Z","lastTransitionTime":"2025-12-01T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.705105 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.705171 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.705190 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.705217 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.705234 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:05Z","lastTransitionTime":"2025-12-01T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.808440 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.808546 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.808615 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.808638 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.808658 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:05Z","lastTransitionTime":"2025-12-01T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.912045 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.912088 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.912102 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.912120 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.912132 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:05Z","lastTransitionTime":"2025-12-01T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.950514 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:05 crc kubenswrapper[4822]: E1201 06:52:05.950714 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:05 crc kubenswrapper[4822]: I1201 06:52:05.950514 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:05 crc kubenswrapper[4822]: E1201 06:52:05.950875 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.014729 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.014798 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.014819 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.014847 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.014900 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:06Z","lastTransitionTime":"2025-12-01T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.117874 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.117953 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.117977 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.118008 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.118034 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:06Z","lastTransitionTime":"2025-12-01T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.221583 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.221685 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.221712 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.221782 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.221801 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:06Z","lastTransitionTime":"2025-12-01T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.325071 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.325109 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.325117 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.325133 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.325144 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:06Z","lastTransitionTime":"2025-12-01T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.428924 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.428987 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.429003 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.429030 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.429048 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:06Z","lastTransitionTime":"2025-12-01T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.532159 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.532201 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.532209 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.532221 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.532230 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:06Z","lastTransitionTime":"2025-12-01T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.637160 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.637235 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.637259 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.637308 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.637334 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:06Z","lastTransitionTime":"2025-12-01T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.740758 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.740821 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.740910 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.740987 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.741017 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:06Z","lastTransitionTime":"2025-12-01T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.844508 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.844609 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.844634 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.844663 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.844688 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:06Z","lastTransitionTime":"2025-12-01T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.948174 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.948254 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.948278 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.948308 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.948329 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:06Z","lastTransitionTime":"2025-12-01T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.950655 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:52:06 crc kubenswrapper[4822]: E1201 06:52:06.950925 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:52:06 crc kubenswrapper[4822]: I1201 06:52:06.950989 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:52:06 crc kubenswrapper[4822]: E1201 06:52:06.951151 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.057894 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.057939 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.057948 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.057965 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.057976 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:07Z","lastTransitionTime":"2025-12-01T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.159943 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.159999 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.160018 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.160035 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.160047 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:07Z","lastTransitionTime":"2025-12-01T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.262727 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.262790 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.262809 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.262835 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.262855 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:07Z","lastTransitionTime":"2025-12-01T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.367189 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.367275 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.367312 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.367349 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.367372 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:07Z","lastTransitionTime":"2025-12-01T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.470852 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.470919 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.470938 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.470968 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.470988 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:07Z","lastTransitionTime":"2025-12-01T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.574818 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.574877 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.574896 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.574918 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.574936 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:07Z","lastTransitionTime":"2025-12-01T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.677782 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.677876 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.677894 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.677920 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.677938 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:07Z","lastTransitionTime":"2025-12-01T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.781004 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.781065 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.781084 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.781109 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.781129 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:07Z","lastTransitionTime":"2025-12-01T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.883889 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.883955 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.883981 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.884009 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.884031 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:07Z","lastTransitionTime":"2025-12-01T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.950699 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.950726 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:52:07 crc kubenswrapper[4822]: E1201 06:52:07.950894 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:52:07 crc kubenswrapper[4822]: E1201 06:52:07.951155 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.965163 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.965277 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:52:07 crc kubenswrapper[4822]: E1201 06:52:07.965327 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:11.965290869 +0000 UTC m=+147.286098605 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.965385 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:52:07 crc kubenswrapper[4822]: E1201 06:52:07.965424 4822 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 01 06:52:07 crc kubenswrapper[4822]: E1201 06:52:07.965539 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:53:11.965516495 +0000 UTC m=+147.286324221 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 01 06:52:07 crc kubenswrapper[4822]: E1201 06:52:07.965626 4822 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 01 06:52:07 crc kubenswrapper[4822]: E1201 06:52:07.965719 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:53:11.96569979 +0000 UTC m=+147.286507506 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.987437 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.987494 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.987516 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.987546 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:07 crc kubenswrapper[4822]: I1201 06:52:07.987612 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:07Z","lastTransitionTime":"2025-12-01T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.066810 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.066931 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.067075 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.067099 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.067110 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.067120 4822 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.067142 4822 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.067159 4822 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.067197 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:53:12.067178197 +0000 UTC m=+147.387985923 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.067224 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:53:12.067211348 +0000 UTC m=+147.388019074 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.091009 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.091081 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.091114 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.091144 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.091165 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.193951 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.194008 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.194024 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.194045 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.194063 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.296921 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.297007 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.297031 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.297061 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.297084 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.400082 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.400121 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.400132 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.400148 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.400161 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.503700 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.503766 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.503783 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.503809 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.503826 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.607757 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.607832 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.607850 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.607876 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.607934 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.711079 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.711145 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.711163 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.711192 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.711209 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.720040 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.720094 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.720113 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.720135 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.720151 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.741230 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.747226 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.747315 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.747332 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.747361 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.747392 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.769992 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.776384 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.776469 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.776495 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.776520 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.776539 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.798408 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.803283 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.803327 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.803344 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.803369 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.803387 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.824495 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.829877 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.829946 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.829966 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.829991 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.830008 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.850591 4822 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:52:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"315fcd00-0c1c-414e-8eb4-b46c25a26f75\\\",\\\"systemUUID\\\":\\\"531638cf-8ff2-40bb-b69e-10a6b05dc0e6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.850826 4822 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.853404 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.853468 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.853487 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.853515 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.853532 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.950356 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.950626 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.950386 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:08 crc kubenswrapper[4822]: E1201 06:52:08.950979 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.957418 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.957481 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.957508 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.957601 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:08 crc kubenswrapper[4822]: I1201 06:52:08.957629 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:08Z","lastTransitionTime":"2025-12-01T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:09 crc kubenswrapper[4822]: I1201 06:52:09.060458 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:09 crc kubenswrapper[4822]: I1201 06:52:09.060505 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:09 crc kubenswrapper[4822]: I1201 06:52:09.060513 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:09 crc kubenswrapper[4822]: I1201 06:52:09.060530 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:09 crc kubenswrapper[4822]: I1201 06:52:09.060540 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:09Z","lastTransitionTime":"2025-12-01T06:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:09 crc kubenswrapper[4822]: I1201 06:52:09.890727 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:09 crc kubenswrapper[4822]: I1201 06:52:09.890780 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:09 crc kubenswrapper[4822]: I1201 06:52:09.890797 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:09 crc kubenswrapper[4822]: I1201 06:52:09.890819 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:09 crc kubenswrapper[4822]: I1201 06:52:09.890836 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:09Z","lastTransitionTime":"2025-12-01T06:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 06:52:09 crc kubenswrapper[4822]: I1201 06:52:09.950753 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:52:09 crc kubenswrapper[4822]: I1201 06:52:09.950753 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:52:09 crc kubenswrapper[4822]: E1201 06:52:09.950952 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:52:09 crc kubenswrapper[4822]: E1201 06:52:09.951094 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:52:10 crc kubenswrapper[4822]: I1201 06:52:10.950776 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:52:10 crc kubenswrapper[4822]: I1201 06:52:10.950829 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:52:10 crc kubenswrapper[4822]: E1201 06:52:10.950986 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8"
Dec 01 06:52:10 crc kubenswrapper[4822]: E1201 06:52:10.951283 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:52:11 crc kubenswrapper[4822]: I1201 06:52:11.988672 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:52:11 crc kubenswrapper[4822]: I1201 06:52:11.988912 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:52:11 crc kubenswrapper[4822]: E1201 06:52:11.989509 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:52:11 crc kubenswrapper[4822]: E1201 06:52:11.989601 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:52:12 crc kubenswrapper[4822]: I1201 06:52:12.950846 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:52:12 crc kubenswrapper[4822]: I1201 06:52:12.950902 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:52:12 crc kubenswrapper[4822]: E1201 06:52:12.951133 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8"
Dec 01 06:52:12 crc kubenswrapper[4822]: E1201 06:52:12.951261 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:52:12 crc kubenswrapper[4822]: I1201 06:52:12.975772 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Dec 01 06:52:13 crc kubenswrapper[4822]: I1201 06:52:13.950700 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:52:13 crc kubenswrapper[4822]: I1201 06:52:13.950724 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:52:13 crc kubenswrapper[4822]: E1201 06:52:13.950926 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:52:13 crc kubenswrapper[4822]: E1201 06:52:13.951134 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Has your network provider started?"} Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.163464 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.163619 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.163644 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.163678 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.163701 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:14Z","lastTransitionTime":"2025-12-01T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.266706 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.266804 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.266823 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.266858 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.266878 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:14Z","lastTransitionTime":"2025-12-01T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.369781 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.369877 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.369905 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.369928 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.369942 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:14Z","lastTransitionTime":"2025-12-01T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.473606 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.473672 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.473683 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.473706 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.473720 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:14Z","lastTransitionTime":"2025-12-01T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.576521 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.576634 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.576660 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.576696 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.576720 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:14Z","lastTransitionTime":"2025-12-01T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.680035 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.680111 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.680129 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.680155 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.680174 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:14Z","lastTransitionTime":"2025-12-01T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.784008 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.784070 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.784087 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.784110 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.784128 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:14Z","lastTransitionTime":"2025-12-01T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.888403 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.888476 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.888497 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.888525 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.888582 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:14Z","lastTransitionTime":"2025-12-01T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.950299 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:52:14 crc kubenswrapper[4822]: E1201 06:52:14.951044 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.951156 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:52:14 crc kubenswrapper[4822]: E1201 06:52:14.951335 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.991661 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.991741 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.991766 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.991795 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:14 crc kubenswrapper[4822]: I1201 06:52:14.991815 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:14Z","lastTransitionTime":"2025-12-01T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.023513 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podStartSLOduration=71.023483003 podStartE2EDuration="1m11.023483003s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:15.008931389 +0000 UTC m=+90.329739115" watchObservedRunningTime="2025-12-01 06:52:15.023483003 +0000 UTC m=+90.344290729"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.024168 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-cv8h4" podStartSLOduration=71.024154792 podStartE2EDuration="1m11.024154792s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:15.024117261 +0000 UTC m=+90.344924987" watchObservedRunningTime="2025-12-01 06:52:15.024154792 +0000 UTC m=+90.344962518"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.088284 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-kb9ml" podStartSLOduration=70.088259231 podStartE2EDuration="1m10.088259231s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:15.054424562 +0000 UTC m=+90.375232258" watchObservedRunningTime="2025-12-01 06:52:15.088259231 +0000 UTC m=+90.409066957"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.097016 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.097217 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.097334 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.097442 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.097584 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:15Z","lastTransitionTime":"2025-12-01T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.123604 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=19.12353589 podStartE2EDuration="19.12353589s" podCreationTimestamp="2025-12-01 06:51:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:15.105793418 +0000 UTC m=+90.426601114" watchObservedRunningTime="2025-12-01 06:52:15.12353589 +0000 UTC m=+90.444343616"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.158594 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-8v9xh" podStartSLOduration=71.158539032 podStartE2EDuration="1m11.158539032s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:15.158033738 +0000 UTC m=+90.478841464" watchObservedRunningTime="2025-12-01 06:52:15.158539032 +0000 UTC m=+90.479346748"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.200500 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.200801 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.200962 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.201085 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.201199 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:15Z","lastTransitionTime":"2025-12-01T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.226730 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-b6tpr" podStartSLOduration=71.226697063 podStartE2EDuration="1m11.226697063s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:15.225334786 +0000 UTC m=+90.546142482" watchObservedRunningTime="2025-12-01 06:52:15.226697063 +0000 UTC m=+90.547504799"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.244674 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-cp777" podStartSLOduration=70.244649252 podStartE2EDuration="1m10.244649252s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:15.244354623 +0000 UTC m=+90.565162339" watchObservedRunningTime="2025-12-01 06:52:15.244649252 +0000 UTC m=+90.565456948"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.266321 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=71.266301353 podStartE2EDuration="1m11.266301353s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:15.265335786 +0000 UTC m=+90.586143502" watchObservedRunningTime="2025-12-01 06:52:15.266301353 +0000 UTC m=+90.587109049"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.280683 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=65.280665761 podStartE2EDuration="1m5.280665761s" podCreationTimestamp="2025-12-01 06:51:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:15.279941931 +0000 UTC m=+90.600749637" watchObservedRunningTime="2025-12-01 06:52:15.280665761 +0000 UTC m=+90.601473467"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.295191 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=38.295167554 podStartE2EDuration="38.295167554s" podCreationTimestamp="2025-12-01 06:51:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:15.294165896 +0000 UTC m=+90.614973572" watchObservedRunningTime="2025-12-01 06:52:15.295167554 +0000 UTC m=+90.615975250"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.303987 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.304232 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.304314 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.304393 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.304470 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:15Z","lastTransitionTime":"2025-12-01T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.346080 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=3.346053466 podStartE2EDuration="3.346053466s" podCreationTimestamp="2025-12-01 06:52:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:15.341340695 +0000 UTC m=+90.662148381" watchObservedRunningTime="2025-12-01 06:52:15.346053466 +0000 UTC m=+90.666861182"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.407631 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.407705 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.407723 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.407746 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.407763 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:15Z","lastTransitionTime":"2025-12-01T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.510779 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.510825 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.510838 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.510857 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.510869 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:15Z","lastTransitionTime":"2025-12-01T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.613988 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.614041 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.614052 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.614070 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.614085 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:15Z","lastTransitionTime":"2025-12-01T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.717053 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.717233 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.717309 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.717342 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.717360 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:15Z","lastTransitionTime":"2025-12-01T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.820706 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.820766 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.820783 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.820809 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.820829 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:15Z","lastTransitionTime":"2025-12-01T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.923981 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.924042 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.924062 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.924087 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.924105 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:15Z","lastTransitionTime":"2025-12-01T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.950003 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.950109 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:52:15 crc kubenswrapper[4822]: E1201 06:52:15.950977 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:52:15 crc kubenswrapper[4822]: E1201 06:52:15.951479 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:52:15 crc kubenswrapper[4822]: I1201 06:52:15.952119 4822 scope.go:117] "RemoveContainer" containerID="3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"
Dec 01 06:52:15 crc kubenswrapper[4822]: E1201 06:52:15.952477 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\"" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.027061 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.027118 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.027139 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.027167 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.027189 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:16Z","lastTransitionTime":"2025-12-01T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.130051 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.130108 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.130124 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.130144 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.130244 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:16Z","lastTransitionTime":"2025-12-01T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.232741 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.232890 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.232905 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.232928 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.232948 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:16Z","lastTransitionTime":"2025-12-01T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.336537 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.336642 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.336664 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.336694 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.336716 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:16Z","lastTransitionTime":"2025-12-01T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.439900 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.439958 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.439976 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.440003 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.440031 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:16Z","lastTransitionTime":"2025-12-01T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.542708 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.542775 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.542796 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.542825 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.542846 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:16Z","lastTransitionTime":"2025-12-01T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.648002 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.648074 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.648097 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.648128 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.648147 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:16Z","lastTransitionTime":"2025-12-01T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.751868 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.751938 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.751953 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.751977 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.751993 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:16Z","lastTransitionTime":"2025-12-01T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.855017 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.855071 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.855089 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.855115 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.855138 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:16Z","lastTransitionTime":"2025-12-01T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.950864 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.950897 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:52:16 crc kubenswrapper[4822]: E1201 06:52:16.951693 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8"
Dec 01 06:52:16 crc kubenswrapper[4822]: E1201 06:52:16.951920 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.963100 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.963151 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.963168 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.963193 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:16 crc kubenswrapper[4822]: I1201 06:52:16.963210 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:16Z","lastTransitionTime":"2025-12-01T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.066781 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.066857 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.066877 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.066907 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.066928 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:17Z","lastTransitionTime":"2025-12-01T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.170006 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.170096 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.170113 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.170193 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.170212 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:17Z","lastTransitionTime":"2025-12-01T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.273508 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.273592 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.273607 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.273651 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.273671 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:17Z","lastTransitionTime":"2025-12-01T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.377269 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.377353 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.377375 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.377415 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.377440 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:17Z","lastTransitionTime":"2025-12-01T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.481348 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.481967 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.482147 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.482308 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.482434 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:17Z","lastTransitionTime":"2025-12-01T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.585209 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.585266 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.585283 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.585308 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.585328 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:17Z","lastTransitionTime":"2025-12-01T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.688845 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.689171 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.689314 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.689533 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.689734 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:17Z","lastTransitionTime":"2025-12-01T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.794479 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.794528 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.794545 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.794611 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.794630 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:17Z","lastTransitionTime":"2025-12-01T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.896989 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.897059 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.897129 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.897154 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.897172 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:17Z","lastTransitionTime":"2025-12-01T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.950860 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:52:17 crc kubenswrapper[4822]: I1201 06:52:17.950936 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:52:17 crc kubenswrapper[4822]: E1201 06:52:17.951110 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:52:17 crc kubenswrapper[4822]: E1201 06:52:17.951266 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.000445 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.000499 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.000520 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.000547 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.000617 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:18Z","lastTransitionTime":"2025-12-01T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.104230 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.104288 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.104305 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.104328 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.104344 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:18Z","lastTransitionTime":"2025-12-01T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.207416 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.207501 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.207542 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.207615 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.207642 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:18Z","lastTransitionTime":"2025-12-01T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.310823 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.310898 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.310915 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.310940 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.310957 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:18Z","lastTransitionTime":"2025-12-01T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.414808 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.414890 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.414915 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.414948 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.414971 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:18Z","lastTransitionTime":"2025-12-01T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.518481 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.518576 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.518596 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.518627 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.518646 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:18Z","lastTransitionTime":"2025-12-01T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.622179 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.622274 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.622302 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.622333 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.622358 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:18Z","lastTransitionTime":"2025-12-01T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.725828 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.725889 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.725906 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.725930 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.725951 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:18Z","lastTransitionTime":"2025-12-01T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.828218 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.828261 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.828272 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.828288 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.828300 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:18Z","lastTransitionTime":"2025-12-01T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.931148 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.931198 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.931274 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.931305 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.931328 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:18Z","lastTransitionTime":"2025-12-01T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.949751 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:18 crc kubenswrapper[4822]: E1201 06:52:18.949893 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.949987 4822 util.go:30] "No sandbox for pod can be found. 
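
The kubelet keeps reporting NodeNotReady above because its runtime network check finds no CNI config under /etc/kubernetes/cni/net.d/. A minimal Go sketch of that kind of directory probe, assuming the common CNI extensions (.conf, .conflist, .json) count as a usable config; the helper is illustrative, not the kubelet's actual code:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether any CNI network config exists in dir.
// Illustrative assumption: .conf, .conflist and .json files count,
// mirroring common CNI loader conventions.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		fmt.Println("NetworkReady=false reason:NetworkPluginNotReady")
		return
	}
	fmt.Println("NetworkReady=true")
}

Once the network plugin writes its config into that directory, the check flips and the Ready condition above stops cycling.
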
Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:18 crc kubenswrapper[4822]: E1201 06:52:18.950256 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.973510 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.973567 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.973576 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.973588 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:52:18 crc kubenswrapper[4822]: I1201 06:52:18.973600 4822 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:52:18Z","lastTransitionTime":"2025-12-01T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.024446 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn"] Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.025186 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.027138 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.028458 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.028324 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.028269 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.095478 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b4ca584-4429-4839-8a4f-7fca097d161a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.095532 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b4ca584-4429-4839-8a4f-7fca097d161a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.095576 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b4ca584-4429-4839-8a4f-7fca097d161a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.095600 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/0b4ca584-4429-4839-8a4f-7fca097d161a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.095753 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/0b4ca584-4429-4839-8a4f-7fca097d161a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.197302 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b4ca584-4429-4839-8a4f-7fca097d161a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc 
kubenswrapper[4822]: I1201 06:52:19.197424 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b4ca584-4429-4839-8a4f-7fca097d161a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.197465 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b4ca584-4429-4839-8a4f-7fca097d161a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.197503 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/0b4ca584-4429-4839-8a4f-7fca097d161a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.197538 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/0b4ca584-4429-4839-8a4f-7fca097d161a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.197643 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/0b4ca584-4429-4839-8a4f-7fca097d161a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.197698 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/0b4ca584-4429-4839-8a4f-7fca097d161a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.198467 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b4ca584-4429-4839-8a4f-7fca097d161a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.204497 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b4ca584-4429-4839-8a4f-7fca097d161a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.217947 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/0b4ca584-4429-4839-8a4f-7fca097d161a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9f2gn\" (UID: \"0b4ca584-4429-4839-8a4f-7fca097d161a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.350865 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.506394 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" event={"ID":"0b4ca584-4429-4839-8a4f-7fca097d161a","Type":"ContainerStarted","Data":"cad8306006bf8a1b32fdf0d77bc0c187694d64314c74a839e4420f13850f8767"} Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.950178 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:19 crc kubenswrapper[4822]: I1201 06:52:19.950185 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:19 crc kubenswrapper[4822]: E1201 06:52:19.950635 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:19 crc kubenswrapper[4822]: E1201 06:52:19.950680 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:20 crc kubenswrapper[4822]: I1201 06:52:20.511486 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" event={"ID":"0b4ca584-4429-4839-8a4f-7fca097d161a","Type":"ContainerStarted","Data":"a579f1b3b8365bf225f936469d4f0168d0475d6caa3f0272c9f565aafa25bad4"} Dec 01 06:52:20 crc kubenswrapper[4822]: I1201 06:52:20.950448 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:20 crc kubenswrapper[4822]: I1201 06:52:20.950529 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:20 crc kubenswrapper[4822]: E1201 06:52:20.950723 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
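
The volume records above follow a two-phase pattern: reconciler_common.go first verifies each desired volume is attached (VerifyControllerAttachedVolume), then starts the mount (MountVolume), and operation_generator.go logs a MountVolume.SetUp success per volume. A simplified Go sketch of that loop, using the five volume names from the cluster-version-operator pod; the stub functions are stand-ins, not the real operationExecutor:

package main

import "fmt"

// Volume names taken from the cluster-version-operator records above.
var desired = []string{
	"kube-api-access", "serving-cert", "service-ca",
	"etc-cvo-updatepayloads", "etc-ssl-certs",
}

// No-op stand-ins for the attach check and the mount itself.
func verifyAttached(v string) error { return nil }
func setUp(v string) error          { return nil }

func main() {
	for _, v := range desired {
		if err := verifyAttached(v); err != nil {
			continue // retried on the next reconcile pass
		}
		if err := setUp(v); err != nil {
			continue
		}
		fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v)
	}
}

Failures at either phase do not abort the loop; each volume is retried independently on later passes, which is what the per-volume retry record further below shows.
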
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:20 crc kubenswrapper[4822]: E1201 06:52:20.951099 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:21 crc kubenswrapper[4822]: I1201 06:52:21.950198 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:21 crc kubenswrapper[4822]: E1201 06:52:21.950296 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:21 crc kubenswrapper[4822]: I1201 06:52:21.950372 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:21 crc kubenswrapper[4822]: E1201 06:52:21.950791 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:22 crc kubenswrapper[4822]: I1201 06:52:22.950840 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:22 crc kubenswrapper[4822]: E1201 06:52:22.951101 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:22 crc kubenswrapper[4822]: I1201 06:52:22.951170 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:22 crc kubenswrapper[4822]: E1201 06:52:22.951356 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:23 crc kubenswrapper[4822]: I1201 06:52:23.652411 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:23 crc kubenswrapper[4822]: E1201 06:52:23.652741 4822 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:52:23 crc kubenswrapper[4822]: E1201 06:52:23.652924 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs podName:80225810-9d72-45b0-980f-1cb242d987e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:53:27.6528876 +0000 UTC m=+162.973695316 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs") pod "network-metrics-daemon-lk8mq" (UID: "80225810-9d72-45b0-980f-1cb242d987e8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:52:23 crc kubenswrapper[4822]: I1201 06:52:23.950234 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:23 crc kubenswrapper[4822]: I1201 06:52:23.950256 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:23 crc kubenswrapper[4822]: E1201 06:52:23.950442 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:23 crc kubenswrapper[4822]: E1201 06:52:23.950639 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:24 crc kubenswrapper[4822]: I1201 06:52:24.950758 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:24 crc kubenswrapper[4822]: E1201 06:52:24.952759 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:24 crc kubenswrapper[4822]: I1201 06:52:24.952888 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:24 crc kubenswrapper[4822]: E1201 06:52:24.953102 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:25 crc kubenswrapper[4822]: I1201 06:52:25.950744 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:25 crc kubenswrapper[4822]: I1201 06:52:25.950791 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:25 crc kubenswrapper[4822]: E1201 06:52:25.950917 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:25 crc kubenswrapper[4822]: E1201 06:52:25.951048 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:26 crc kubenswrapper[4822]: I1201 06:52:26.950363 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:26 crc kubenswrapper[4822]: E1201 06:52:26.950590 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:26 crc kubenswrapper[4822]: I1201 06:52:26.950596 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:26 crc kubenswrapper[4822]: E1201 06:52:26.951304 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:27 crc kubenswrapper[4822]: I1201 06:52:27.950501 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:27 crc kubenswrapper[4822]: E1201 06:52:27.950740 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:27 crc kubenswrapper[4822]: I1201 06:52:27.950769 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:27 crc kubenswrapper[4822]: E1201 06:52:27.950909 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:27 crc kubenswrapper[4822]: I1201 06:52:27.952157 4822 scope.go:117] "RemoveContainer" containerID="3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143" Dec 01 06:52:27 crc kubenswrapper[4822]: E1201 06:52:27.952437 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\"" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" Dec 01 06:52:28 crc kubenswrapper[4822]: I1201 06:52:28.950006 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:28 crc kubenswrapper[4822]: I1201 06:52:28.950227 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:28 crc kubenswrapper[4822]: E1201 06:52:28.950454 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:28 crc kubenswrapper[4822]: E1201 06:52:28.950722 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:29 crc kubenswrapper[4822]: I1201 06:52:29.950376 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:29 crc kubenswrapper[4822]: I1201 06:52:29.950410 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:29 crc kubenswrapper[4822]: E1201 06:52:29.950600 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:29 crc kubenswrapper[4822]: E1201 06:52:29.950717 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:30 crc kubenswrapper[4822]: I1201 06:52:30.950642 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:30 crc kubenswrapper[4822]: E1201 06:52:30.950811 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:30 crc kubenswrapper[4822]: I1201 06:52:30.950946 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:30 crc kubenswrapper[4822]: E1201 06:52:30.951150 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:31 crc kubenswrapper[4822]: I1201 06:52:31.950820 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:31 crc kubenswrapper[4822]: I1201 06:52:31.950863 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:31 crc kubenswrapper[4822]: E1201 06:52:31.951152 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:31 crc kubenswrapper[4822]: E1201 06:52:31.951293 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:32 crc kubenswrapper[4822]: I1201 06:52:32.950043 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:32 crc kubenswrapper[4822]: I1201 06:52:32.950077 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:32 crc kubenswrapper[4822]: E1201 06:52:32.950272 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:32 crc kubenswrapper[4822]: E1201 06:52:32.950666 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:33 crc kubenswrapper[4822]: I1201 06:52:33.950818 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:33 crc kubenswrapper[4822]: I1201 06:52:33.951000 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:33 crc kubenswrapper[4822]: E1201 06:52:33.951024 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:33 crc kubenswrapper[4822]: E1201 06:52:33.951285 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:34 crc kubenswrapper[4822]: I1201 06:52:34.950444 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:34 crc kubenswrapper[4822]: I1201 06:52:34.950502 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:34 crc kubenswrapper[4822]: E1201 06:52:34.952626 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:34 crc kubenswrapper[4822]: E1201 06:52:34.952802 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:35 crc kubenswrapper[4822]: I1201 06:52:35.950289 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:35 crc kubenswrapper[4822]: E1201 06:52:35.950642 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:35 crc kubenswrapper[4822]: I1201 06:52:35.950325 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:35 crc kubenswrapper[4822]: E1201 06:52:35.951112 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:36 crc kubenswrapper[4822]: I1201 06:52:36.950079 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:36 crc kubenswrapper[4822]: E1201 06:52:36.950286 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:36 crc kubenswrapper[4822]: I1201 06:52:36.950394 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:36 crc kubenswrapper[4822]: E1201 06:52:36.950815 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:37 crc kubenswrapper[4822]: I1201 06:52:37.950856 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:37 crc kubenswrapper[4822]: I1201 06:52:37.951896 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:37 crc kubenswrapper[4822]: E1201 06:52:37.952127 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:37 crc kubenswrapper[4822]: E1201 06:52:37.952393 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:38 crc kubenswrapper[4822]: I1201 06:52:38.593811 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-b6tpr_34b58185-4742-4187-9243-860433c413d8/kube-multus/1.log" Dec 01 06:52:38 crc kubenswrapper[4822]: I1201 06:52:38.595307 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-b6tpr_34b58185-4742-4187-9243-860433c413d8/kube-multus/0.log" Dec 01 06:52:38 crc kubenswrapper[4822]: I1201 06:52:38.595382 4822 generic.go:334] "Generic (PLEG): container finished" podID="34b58185-4742-4187-9243-860433c413d8" containerID="286b5eec5380bce867f5f628bb82ce645ccb06c9b5efd892b1530a323d2f97e6" exitCode=1 Dec 01 06:52:38 crc kubenswrapper[4822]: I1201 06:52:38.595468 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-b6tpr" event={"ID":"34b58185-4742-4187-9243-860433c413d8","Type":"ContainerDied","Data":"286b5eec5380bce867f5f628bb82ce645ccb06c9b5efd892b1530a323d2f97e6"} Dec 01 06:52:38 crc kubenswrapper[4822]: I1201 06:52:38.595678 4822 scope.go:117] "RemoveContainer" containerID="671e4521aa47523d2dcb103ab710c71ab2eba9e475d7d40f41ee95b577f2540b" Dec 01 06:52:38 crc kubenswrapper[4822]: I1201 06:52:38.596640 4822 scope.go:117] "RemoveContainer" containerID="286b5eec5380bce867f5f628bb82ce645ccb06c9b5efd892b1530a323d2f97e6" Dec 01 06:52:38 crc kubenswrapper[4822]: E1201 06:52:38.596915 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-b6tpr_openshift-multus(34b58185-4742-4187-9243-860433c413d8)\"" pod="openshift-multus/multus-b6tpr" podUID="34b58185-4742-4187-9243-860433c413d8" Dec 01 06:52:38 crc kubenswrapper[4822]: I1201 06:52:38.625623 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9f2gn" podStartSLOduration=94.625590292 podStartE2EDuration="1m34.625590292s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:20.533587355 +0000 UTC m=+95.854395071" watchObservedRunningTime="2025-12-01 06:52:38.625590292 +0000 UTC m=+113.946398018" Dec 01 06:52:38 crc kubenswrapper[4822]: I1201 06:52:38.950524 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:38 crc kubenswrapper[4822]: I1201 06:52:38.950610 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:38 crc kubenswrapper[4822]: E1201 06:52:38.950846 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:38 crc kubenswrapper[4822]: E1201 06:52:38.950953 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:39 crc kubenswrapper[4822]: I1201 06:52:39.602036 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-b6tpr_34b58185-4742-4187-9243-860433c413d8/kube-multus/1.log" Dec 01 06:52:39 crc kubenswrapper[4822]: I1201 06:52:39.950202 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:39 crc kubenswrapper[4822]: I1201 06:52:39.950236 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:39 crc kubenswrapper[4822]: E1201 06:52:39.950710 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:39 crc kubenswrapper[4822]: E1201 06:52:39.950881 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:40 crc kubenswrapper[4822]: I1201 06:52:40.950838 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:40 crc kubenswrapper[4822]: I1201 06:52:40.950919 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:40 crc kubenswrapper[4822]: E1201 06:52:40.951605 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:40 crc kubenswrapper[4822]: E1201 06:52:40.951799 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:40 crc kubenswrapper[4822]: I1201 06:52:40.952147 4822 scope.go:117] "RemoveContainer" containerID="3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143" Dec 01 06:52:40 crc kubenswrapper[4822]: E1201 06:52:40.952347 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-75mdq_openshift-ovn-kubernetes(8284d339-ff12-453a-be42-4540e44252ee)\"" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" Dec 01 06:52:41 crc kubenswrapper[4822]: I1201 06:52:41.950400 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:41 crc kubenswrapper[4822]: E1201 06:52:41.951053 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:41 crc kubenswrapper[4822]: I1201 06:52:41.950582 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:41 crc kubenswrapper[4822]: E1201 06:52:41.951608 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:42 crc kubenswrapper[4822]: I1201 06:52:42.950369 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:42 crc kubenswrapper[4822]: I1201 06:52:42.950420 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:42 crc kubenswrapper[4822]: E1201 06:52:42.950736 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:42 crc kubenswrapper[4822]: E1201 06:52:42.950893 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:43 crc kubenswrapper[4822]: I1201 06:52:43.950420 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:43 crc kubenswrapper[4822]: E1201 06:52:43.950682 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:43 crc kubenswrapper[4822]: I1201 06:52:43.950447 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:43 crc kubenswrapper[4822]: E1201 06:52:43.950987 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:44 crc kubenswrapper[4822]: E1201 06:52:44.896969 4822 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 01 06:52:44 crc kubenswrapper[4822]: I1201 06:52:44.951827 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:44 crc kubenswrapper[4822]: I1201 06:52:44.951853 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:44 crc kubenswrapper[4822]: E1201 06:52:44.952026 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:44 crc kubenswrapper[4822]: E1201 06:52:44.952194 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:45 crc kubenswrapper[4822]: E1201 06:52:45.044323 4822 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 06:52:45 crc kubenswrapper[4822]: I1201 06:52:45.950285 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:45 crc kubenswrapper[4822]: I1201 06:52:45.950377 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:45 crc kubenswrapper[4822]: E1201 06:52:45.951291 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:45 crc kubenswrapper[4822]: E1201 06:52:45.951479 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:46 crc kubenswrapper[4822]: I1201 06:52:46.950861 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:46 crc kubenswrapper[4822]: I1201 06:52:46.950967 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:46 crc kubenswrapper[4822]: E1201 06:52:46.951048 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:46 crc kubenswrapper[4822]: E1201 06:52:46.951307 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:47 crc kubenswrapper[4822]: I1201 06:52:47.950147 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:47 crc kubenswrapper[4822]: I1201 06:52:47.950147 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:47 crc kubenswrapper[4822]: E1201 06:52:47.950370 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:47 crc kubenswrapper[4822]: E1201 06:52:47.950477 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:48 crc kubenswrapper[4822]: I1201 06:52:48.950683 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:48 crc kubenswrapper[4822]: I1201 06:52:48.950698 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:48 crc kubenswrapper[4822]: E1201 06:52:48.950843 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:48 crc kubenswrapper[4822]: E1201 06:52:48.950994 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:49 crc kubenswrapper[4822]: I1201 06:52:49.950667 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:49 crc kubenswrapper[4822]: I1201 06:52:49.950819 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:49 crc kubenswrapper[4822]: E1201 06:52:49.950894 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:49 crc kubenswrapper[4822]: E1201 06:52:49.951138 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:50 crc kubenswrapper[4822]: E1201 06:52:50.046390 4822 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 06:52:50 crc kubenswrapper[4822]: I1201 06:52:50.950826 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:50 crc kubenswrapper[4822]: I1201 06:52:50.951010 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:50 crc kubenswrapper[4822]: E1201 06:52:50.951264 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:50 crc kubenswrapper[4822]: I1201 06:52:50.951363 4822 scope.go:117] "RemoveContainer" containerID="286b5eec5380bce867f5f628bb82ce645ccb06c9b5efd892b1530a323d2f97e6" Dec 01 06:52:50 crc kubenswrapper[4822]: E1201 06:52:50.951388 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:51 crc kubenswrapper[4822]: I1201 06:52:51.655822 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-b6tpr_34b58185-4742-4187-9243-860433c413d8/kube-multus/1.log" Dec 01 06:52:51 crc kubenswrapper[4822]: I1201 06:52:51.655929 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-b6tpr" event={"ID":"34b58185-4742-4187-9243-860433c413d8","Type":"ContainerStarted","Data":"1c1cebe76b95f0676ed78339399a49cb488c2817e6e376c7a0fbc8b707ce3c8a"} Dec 01 06:52:51 crc kubenswrapper[4822]: I1201 06:52:51.950844 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:51 crc kubenswrapper[4822]: I1201 06:52:51.950912 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:51 crc kubenswrapper[4822]: E1201 06:52:51.951487 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:51 crc kubenswrapper[4822]: E1201 06:52:51.951706 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:52 crc kubenswrapper[4822]: I1201 06:52:52.950714 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:52 crc kubenswrapper[4822]: I1201 06:52:52.950717 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:52 crc kubenswrapper[4822]: E1201 06:52:52.950939 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:52 crc kubenswrapper[4822]: E1201 06:52:52.951028 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:53 crc kubenswrapper[4822]: I1201 06:52:53.950345 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:53 crc kubenswrapper[4822]: I1201 06:52:53.950396 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:53 crc kubenswrapper[4822]: E1201 06:52:53.950677 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:53 crc kubenswrapper[4822]: E1201 06:52:53.950849 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:54 crc kubenswrapper[4822]: I1201 06:52:54.950271 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:54 crc kubenswrapper[4822]: I1201 06:52:54.950313 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:54 crc kubenswrapper[4822]: E1201 06:52:54.952491 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:54 crc kubenswrapper[4822]: E1201 06:52:54.952625 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:54 crc kubenswrapper[4822]: I1201 06:52:54.953672 4822 scope.go:117] "RemoveContainer" containerID="3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143" Dec 01 06:52:55 crc kubenswrapper[4822]: E1201 06:52:55.047354 4822 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 06:52:55 crc kubenswrapper[4822]: I1201 06:52:55.672396 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/3.log" Dec 01 06:52:55 crc kubenswrapper[4822]: I1201 06:52:55.674895 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerStarted","Data":"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"} Dec 01 06:52:55 crc kubenswrapper[4822]: I1201 06:52:55.675962 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:52:55 crc kubenswrapper[4822]: I1201 06:52:55.710850 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podStartSLOduration=110.710831346 podStartE2EDuration="1m50.710831346s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:55.709166359 +0000 UTC m=+131.029974055" watchObservedRunningTime="2025-12-01 06:52:55.710831346 +0000 UTC m=+131.031639042" Dec 01 06:52:55 crc kubenswrapper[4822]: I1201 06:52:55.836801 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-lk8mq"] Dec 01 06:52:55 crc kubenswrapper[4822]: I1201 06:52:55.836914 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:55 crc kubenswrapper[4822]: E1201 06:52:55.836995 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:55 crc kubenswrapper[4822]: I1201 06:52:55.949765 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:55 crc kubenswrapper[4822]: I1201 06:52:55.949836 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:55 crc kubenswrapper[4822]: E1201 06:52:55.949924 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:55 crc kubenswrapper[4822]: E1201 06:52:55.950027 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:56 crc kubenswrapper[4822]: I1201 06:52:56.950542 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:56 crc kubenswrapper[4822]: E1201 06:52:56.951141 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:57 crc kubenswrapper[4822]: I1201 06:52:57.951011 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:57 crc kubenswrapper[4822]: I1201 06:52:57.951248 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:57 crc kubenswrapper[4822]: I1201 06:52:57.951295 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:57 crc kubenswrapper[4822]: E1201 06:52:57.951437 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:57 crc kubenswrapper[4822]: E1201 06:52:57.951512 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:52:57 crc kubenswrapper[4822]: E1201 06:52:57.951649 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:58 crc kubenswrapper[4822]: I1201 06:52:58.950635 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:52:58 crc kubenswrapper[4822]: E1201 06:52:58.950815 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:52:59 crc kubenswrapper[4822]: I1201 06:52:59.950388 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:52:59 crc kubenswrapper[4822]: I1201 06:52:59.950473 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:52:59 crc kubenswrapper[4822]: I1201 06:52:59.950387 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:52:59 crc kubenswrapper[4822]: E1201 06:52:59.950626 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:52:59 crc kubenswrapper[4822]: E1201 06:52:59.950710 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-lk8mq" podUID="80225810-9d72-45b0-980f-1cb242d987e8" Dec 01 06:52:59 crc kubenswrapper[4822]: E1201 06:52:59.950797 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.208669 4822 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.252308 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-nmx5d"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.252815 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.260359 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t4mcw"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.260981 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.261324 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.266023 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.268295 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.269846 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.270845 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.271479 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.272212 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.272483 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.273206 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.273464 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.273967 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.274600 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.274955 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.275326 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.275583 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.279755 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-j7hbz"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.280204 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.280525 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-9f7zg"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.280585 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.280794 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.280839 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.281649 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.281709 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-j6lgc"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.281983 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.282782 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.283044 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.283317 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.288917 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.289175 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.289528 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.289538 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.289672 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.289714 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.290448 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.292568 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.292940 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.293089 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.293302 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.293440 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.293944 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.294128 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.294337 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.294473 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.294616 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.294759 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.294885 4822 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-oauth-apiserver"/"etcd-client" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.295000 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.295165 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.297153 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.297436 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.297648 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.298394 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-jq95c"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.298497 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.314689 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.320715 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6nbbk"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.323417 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.325173 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.327352 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.331073 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.331116 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.333328 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.333403 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.338843 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.340062 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.340814 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.340960 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341054 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341119 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341171 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341260 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341367 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341467 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341551 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341587 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341469 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341680 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341756 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341789 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 01 06:53:00 crc 
kubenswrapper[4822]: I1201 06:53:00.341824 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.341907 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.343417 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.345935 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.346082 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.346204 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.346936 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.347153 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.347213 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.347565 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.347157 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.347741 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.347908 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.348061 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.348092 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.348182 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.348601 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.349511 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-ptzkb"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.349904 4822 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.350322 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-k9qc2"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.350514 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.350756 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.350822 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.351598 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.352103 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-k9qc2" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.352564 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4j7rj"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.354428 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-4j7rj" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.355080 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.355312 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.355963 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.356544 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.357569 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.360496 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.360650 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-s9g5t"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.361242 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.361297 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-s9g5t" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.363013 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.363189 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.363837 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.364982 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hpr4x"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.370241 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.372663 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.373619 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.374460 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.374887 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.376326 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-nkhfk"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.376745 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.379601 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.380275 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.380441 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.380674 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.380769 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.380780 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.380860 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.380950 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.381088 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.381246 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.381811 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.381930 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.383200 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-nmx5d"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.388818 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.389417 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.389784 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.389911 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.393168 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.393329 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.393499 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.393984 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.394563 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t4mcw"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.409372 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.409667 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9hgl8"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.410584 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.410963 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.411154 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.411287 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.411317 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9hgl8" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.411460 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.411604 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.411880 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.411943 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.415602 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.422528 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.423326 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.423650 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.423777 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.424843 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-ddprm"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.425331 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.425574 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.425742 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.425741 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.428292 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.430407 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5c4b8"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.431097 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.431332 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.431979 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.433740 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.434607 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.436205 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.436590 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441216 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af89d20b-5709-4b8c-ade4-16f111487525-serving-cert\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441258 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wh8v\" (UniqueName: \"kubernetes.io/projected/e411fb84-4e27-4611-80b5-9fd52e71441e-kube-api-access-5wh8v\") pod \"control-plane-machine-set-operator-78cbb6b69f-qhchn\" (UID: \"e411fb84-4e27-4611-80b5-9fd52e71441e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441281 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441298 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9c5f\" (UniqueName: \"kubernetes.io/projected/c0c87798-bf92-4257-95df-a5aa1c305994-kube-api-access-l9c5f\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441319 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e6164df-7beb-4da5-8853-a0785999d30c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9lk5l\" (UID: \"0e6164df-7beb-4da5-8853-a0785999d30c\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441346 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fffc4838-3f04-4867-b948-b40f642203de-audit-dir\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441364 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e6164df-7beb-4da5-8853-a0785999d30c-config\") pod \"kube-controller-manager-operator-78b949d7b-9lk5l\" (UID: \"0e6164df-7beb-4da5-8853-a0785999d30c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441383 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441399 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c0c87798-bf92-4257-95df-a5aa1c305994-serving-cert\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441427 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/af89d20b-5709-4b8c-ade4-16f111487525-encryption-config\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441501 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c0c87798-bf92-4257-95df-a5aa1c305994-etcd-client\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441587 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441610 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-audit-policies\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441672 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441697 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e6164df-7beb-4da5-8853-a0785999d30c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9lk5l\" (UID: \"0e6164df-7beb-4da5-8853-a0785999d30c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441774 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/af89d20b-5709-4b8c-ade4-16f111487525-audit-policies\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441797 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c0c87798-bf92-4257-95df-a5aa1c305994-etcd-service-ca\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441849 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/af89d20b-5709-4b8c-ade4-16f111487525-audit-dir\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441873 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h97kh\" (UniqueName: \"kubernetes.io/projected/af89d20b-5709-4b8c-ade4-16f111487525-kube-api-access-h97kh\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441917 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441937 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: 
\"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441953 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4k2f\" (UniqueName: \"kubernetes.io/projected/fffc4838-3f04-4867-b948-b40f642203de-kube-api-access-f4k2f\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.441977 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.442018 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c0c87798-bf92-4257-95df-a5aa1c305994-etcd-ca\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.442037 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e411fb84-4e27-4611-80b5-9fd52e71441e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-qhchn\" (UID: \"e411fb84-4e27-4611-80b5-9fd52e71441e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.442057 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.442089 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af89d20b-5709-4b8c-ade4-16f111487525-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.442108 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/af89d20b-5709-4b8c-ade4-16f111487525-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.442167 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.442216 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c87798-bf92-4257-95df-a5aa1c305994-config\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.442314 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.442352 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.442374 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/af89d20b-5709-4b8c-ade4-16f111487525-etcd-client\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.451728 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.452380 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.452471 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.453112 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.456989 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dlh8x"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.457601 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.457662 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.468847 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.475749 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.476682 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-j7hbz"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.478102 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-jq95c"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.481740 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.483641 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-j6lgc"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.484497 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.487412 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-ptzkb"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.487431 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4j7rj"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.489128 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-9f7zg"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.489695 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-6mnpz"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.490662 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.491968 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-kcpds"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.492431 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-kcpds" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.495605 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.495632 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6nbbk"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.503366 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dlh8x"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.503471 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-s9g5t"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.504551 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.507913 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.507940 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.507950 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.519155 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9hgl8"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.523357 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.523940 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.526421 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.538242 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.538290 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.539662 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.540767 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hpr4x"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.542048 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-k9qc2"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543376 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-audit-policies\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543415 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543451 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e6164df-7beb-4da5-8853-a0785999d30c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9lk5l\" (UID: \"0e6164df-7beb-4da5-8853-a0785999d30c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543492 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/af89d20b-5709-4b8c-ade4-16f111487525-audit-policies\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543519 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c0c87798-bf92-4257-95df-a5aa1c305994-etcd-service-ca\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543571 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/af89d20b-5709-4b8c-ade4-16f111487525-audit-dir\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543606 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h97kh\" (UniqueName: \"kubernetes.io/projected/af89d20b-5709-4b8c-ade4-16f111487525-kube-api-access-h97kh\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543635 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543649 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-9lnf6"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543662 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543696 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4k2f\" (UniqueName: \"kubernetes.io/projected/fffc4838-3f04-4867-b948-b40f642203de-kube-api-access-f4k2f\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543741 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543794 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e411fb84-4e27-4611-80b5-9fd52e71441e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-qhchn\" (UID: \"e411fb84-4e27-4611-80b5-9fd52e71441e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543824 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543861 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c0c87798-bf92-4257-95df-a5aa1c305994-etcd-ca\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543887 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af89d20b-5709-4b8c-ade4-16f111487525-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543921 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543945 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/c0c87798-bf92-4257-95df-a5aa1c305994-config\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.543990 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/af89d20b-5709-4b8c-ade4-16f111487525-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544015 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544042 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544066 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/af89d20b-5709-4b8c-ade4-16f111487525-etcd-client\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544094 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wh8v\" (UniqueName: \"kubernetes.io/projected/e411fb84-4e27-4611-80b5-9fd52e71441e-kube-api-access-5wh8v\") pod \"control-plane-machine-set-operator-78cbb6b69f-qhchn\" (UID: \"e411fb84-4e27-4611-80b5-9fd52e71441e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544121 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544147 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af89d20b-5709-4b8c-ade4-16f111487525-serving-cert\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544173 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9c5f\" (UniqueName: \"kubernetes.io/projected/c0c87798-bf92-4257-95df-a5aa1c305994-kube-api-access-l9c5f\") pod \"etcd-operator-b45778765-j7hbz\" (UID: 
\"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544197 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e6164df-7beb-4da5-8853-a0785999d30c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9lk5l\" (UID: \"0e6164df-7beb-4da5-8853-a0785999d30c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544221 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e6164df-7beb-4da5-8853-a0785999d30c-config\") pod \"kube-controller-manager-operator-78b949d7b-9lk5l\" (UID: \"0e6164df-7beb-4da5-8853-a0785999d30c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544246 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fffc4838-3f04-4867-b948-b40f642203de-audit-dir\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544275 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c0c87798-bf92-4257-95df-a5aa1c305994-serving-cert\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544304 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544343 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/af89d20b-5709-4b8c-ade4-16f111487525-encryption-config\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544413 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-audit-policies\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544434 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c0c87798-bf92-4257-95df-a5aa1c305994-etcd-client\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544464 4822 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c0c87798-bf92-4257-95df-a5aa1c305994-etcd-service-ca\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544485 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544620 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/af89d20b-5709-4b8c-ade4-16f111487525-audit-dir\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.545210 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/af89d20b-5709-4b8c-ade4-16f111487525-audit-policies\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.544427 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-9lnf6" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.545303 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.545469 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.546238 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.546925 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-f87b4"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.547938 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-f87b4" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.549681 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.549781 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.549796 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.549949 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-6mnpz"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.550167 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c0c87798-bf92-4257-95df-a5aa1c305994-etcd-client\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.550293 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c0c87798-bf92-4257-95df-a5aa1c305994-etcd-ca\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.550907 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/af89d20b-5709-4b8c-ade4-16f111487525-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.551067 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/af89d20b-5709-4b8c-ade4-16f111487525-etcd-client\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.551134 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e6164df-7beb-4da5-8853-a0785999d30c-config\") pod \"kube-controller-manager-operator-78b949d7b-9lk5l\" (UID: \"0e6164df-7beb-4da5-8853-a0785999d30c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" Dec 01 06:53:00 crc 
kubenswrapper[4822]: I1201 06:53:00.551397 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c87798-bf92-4257-95df-a5aa1c305994-config\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.551478 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.551572 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fffc4838-3f04-4867-b948-b40f642203de-audit-dir\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.552031 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/af89d20b-5709-4b8c-ade4-16f111487525-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.552199 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e411fb84-4e27-4611-80b5-9fd52e71441e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-qhchn\" (UID: \"e411fb84-4e27-4611-80b5-9fd52e71441e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.552421 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/af89d20b-5709-4b8c-ade4-16f111487525-serving-cert\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.552826 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.553595 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.553762 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/af89d20b-5709-4b8c-ade4-16f111487525-encryption-config\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 
06:53:00.554037 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e6164df-7beb-4da5-8853-a0785999d30c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9lk5l\" (UID: \"0e6164df-7beb-4da5-8853-a0785999d30c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.554304 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c0c87798-bf92-4257-95df-a5aa1c305994-serving-cert\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.554942 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-ddprm"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.556046 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.556595 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.557796 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.558951 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-kcpds"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.560104 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.560603 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.562676 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.564069 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.564631 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.564846 4822 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5c4b8"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.566336 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.567777 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-f87b4"] Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.579292 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.598205 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.618996 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.639563 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.659101 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.678778 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.699629 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.719012 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.739506 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.759300 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.799007 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.819180 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.839185 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.859812 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.879182 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.899118 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.918375 4822 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.939322 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.950909 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.960184 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.979091 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 01 06:53:00 crc kubenswrapper[4822]: I1201 06:53:00.998540 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.018924 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.038936 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.059416 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.086712 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.098725 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.130012 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.138718 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.159028 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.179323 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.198806 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.218634 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.238869 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.259403 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 01 06:53:01 crc 
kubenswrapper[4822]: I1201 06:53:01.279176 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.299250 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.319512 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.339475 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.359460 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.377780 4822 request.go:700] Waited for 1.000644333s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/secrets?fieldSelector=metadata.name%3Drouter-dockercfg-zdk86&limit=500&resourceVersion=0 Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.379794 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.399914 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.418989 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.438661 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.459920 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.479028 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.520136 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.541048 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.555839 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-registry-tls\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.555888 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92434bb5-42fe-4d2e-8a9e-0517b46c6774-serving-cert\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " 
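[Editor's aside: the request.go:700 entry above is emitted by client-go's own rate limiter, not by API-server priority and fairness; the kubelet queued its GET for just over a second because its client exceeded its configured QPS/Burst. A minimal sketch, assuming stock client-go and illustrative QPS/Burst values (the kubelet's actual limits are not shown in this log), of a list call that can hit exactly this wait:

    // throttle.go: reproduce the conditions behind "Waited for ... due to
    // client-side throttling". Values here are illustrative assumptions.
    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/rest"
    )

    func main() {
        cfg, err := rest.InClusterConfig()
        if err != nil {
            panic(err)
        }
        // client-go queues requests on the client side once the steady rate
        // exceeds QPS and more than Burst are pending; waits over ~1s are
        // logged by request.go as "client-side throttling".
        cfg.QPS = 5
        cfg.Burst = 10

        client, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        // Mirrors the throttled request in the log: a field-selected Secret
        // list in openshift-ingress.
        secrets, err := client.CoreV1().Secrets("openshift-ingress").List(context.TODO(), metav1.ListOptions{
            FieldSelector: "metadata.name=router-dockercfg-zdk86",
            Limit:         500,
        })
        if err != nil {
            panic(err)
        }
        fmt.Println("secrets:", len(secrets.Items))
    }

Raising QPS/Burst (or reducing the burst of informer starts) shortens these waits; the log resumes below.]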
pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.555925 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee914768-03f2-4378-9db1-02fdc1b8d048-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-c2l47\" (UID: \"ee914768-03f2-4378-9db1-02fdc1b8d048\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.555960 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-client-ca\") pod \"route-controller-manager-6576b87f9c-9sr2j\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556008 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-config\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556030 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0126cc59-9259-43c1-91ea-cdc05047bbee-images\") pod \"machine-api-operator-5694c8668f-6nbbk\" (UID: \"0126cc59-9259-43c1-91ea-cdc05047bbee\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556053 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0416c999-c003-42ff-8c6e-30357b23975d-serving-cert\") pod \"openshift-config-operator-7777fb866f-dkqzc\" (UID: \"0416c999-c003-42ff-8c6e-30357b23975d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556144 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0a22c90f-806d-43a3-b68a-321f5763b64c-images\") pod \"machine-config-operator-74547568cd-d4kcb\" (UID: \"0a22c90f-806d-43a3-b68a-321f5763b64c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556211 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bab12393-6d6a-459b-8054-0934d99ffd0f-serving-cert\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556262 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-etcd-serving-ca\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 
crc kubenswrapper[4822]: I1201 06:53:01.556374 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ba3ade7b-ad1c-4f60-9cde-f7a198336912-ca-trust-extracted\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556488 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/92434bb5-42fe-4d2e-8a9e-0517b46c6774-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556520 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/92434bb5-42fe-4d2e-8a9e-0517b46c6774-service-ca-bundle\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556544 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca838455-398e-4c35-844e-201e3fe65b5d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fwgk6\" (UID: \"ca838455-398e-4c35-844e-201e3fe65b5d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556588 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4570adbe-e705-4cbe-aecb-b27a3b930048-serving-cert\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556610 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bab12393-6d6a-459b-8054-0934d99ffd0f-audit-dir\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556634 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556666 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bab12393-6d6a-459b-8054-0934d99ffd0f-etcd-client\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556689 
4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/bab12393-6d6a-459b-8054-0934d99ffd0f-encryption-config\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556730 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556755 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ba3ade7b-ad1c-4f60-9cde-f7a198336912-registry-certificates\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556782 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-bound-sa-token\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556804 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgcrk\" (UniqueName: \"kubernetes.io/projected/0a22c90f-806d-43a3-b68a-321f5763b64c-kube-api-access-cgcrk\") pod \"machine-config-operator-74547568cd-d4kcb\" (UID: \"0a22c90f-806d-43a3-b68a-321f5763b64c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556848 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgmng\" (UniqueName: \"kubernetes.io/projected/bab12393-6d6a-459b-8054-0934d99ffd0f-kube-api-access-sgmng\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556872 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92434bb5-42fe-4d2e-8a9e-0517b46c6774-config\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556897 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca838455-398e-4c35-844e-201e3fe65b5d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fwgk6\" (UID: \"ca838455-398e-4c35-844e-201e3fe65b5d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 
06:53:01.556931 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4zlg\" (UniqueName: \"kubernetes.io/projected/0416c999-c003-42ff-8c6e-30357b23975d-kube-api-access-q4zlg\") pod \"openshift-config-operator-7777fb866f-dkqzc\" (UID: \"0416c999-c003-42ff-8c6e-30357b23975d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556954 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-725wm\" (UniqueName: \"kubernetes.io/projected/ca838455-398e-4c35-844e-201e3fe65b5d-kube-api-access-725wm\") pod \"kube-storage-version-migrator-operator-b67b599dd-fwgk6\" (UID: \"ca838455-398e-4c35-844e-201e3fe65b5d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.556976 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1fd76610-c79c-4c82-8b29-9038aa0ac22c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-dpb7m\" (UID: \"1fd76610-c79c-4c82-8b29-9038aa0ac22c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557003 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee914768-03f2-4378-9db1-02fdc1b8d048-config\") pod \"openshift-apiserver-operator-796bbdcf4f-c2l47\" (UID: \"ee914768-03f2-4378-9db1-02fdc1b8d048\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557033 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjtvr\" (UniqueName: \"kubernetes.io/projected/4570adbe-e705-4cbe-aecb-b27a3b930048-kube-api-access-gjtvr\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557052 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-audit\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557077 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr4k9\" (UniqueName: \"kubernetes.io/projected/1fd76610-c79c-4c82-8b29-9038aa0ac22c-kube-api-access-kr4k9\") pod \"cluster-image-registry-operator-dc59b4c8b-dpb7m\" (UID: \"1fd76610-c79c-4c82-8b29-9038aa0ac22c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557108 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-config\") pod \"route-controller-manager-6576b87f9c-9sr2j\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557128 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ba3ade7b-ad1c-4f60-9cde-f7a198336912-installation-pull-secrets\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557145 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-client-ca\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557165 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-config\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557187 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nxrr\" (UniqueName: \"kubernetes.io/projected/0126cc59-9259-43c1-91ea-cdc05047bbee-kube-api-access-8nxrr\") pod \"machine-api-operator-5694c8668f-6nbbk\" (UID: \"0126cc59-9259-43c1-91ea-cdc05047bbee\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557210 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrn7l\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-kube-api-access-hrn7l\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557234 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwssj\" (UniqueName: \"kubernetes.io/projected/ee914768-03f2-4378-9db1-02fdc1b8d048-kube-api-access-xwssj\") pod \"openshift-apiserver-operator-796bbdcf4f-c2l47\" (UID: \"ee914768-03f2-4378-9db1-02fdc1b8d048\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557255 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5c5s\" (UniqueName: \"kubernetes.io/projected/92434bb5-42fe-4d2e-8a9e-0517b46c6774-kube-api-access-t5c5s\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557273 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-serving-cert\") pod \"route-controller-manager-6576b87f9c-9sr2j\" (UID: 
\"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557347 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9fln\" (UniqueName: \"kubernetes.io/projected/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-kube-api-access-n9fln\") pod \"route-controller-manager-6576b87f9c-9sr2j\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557373 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0126cc59-9259-43c1-91ea-cdc05047bbee-config\") pod \"machine-api-operator-5694c8668f-6nbbk\" (UID: \"0126cc59-9259-43c1-91ea-cdc05047bbee\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557392 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1fd76610-c79c-4c82-8b29-9038aa0ac22c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-dpb7m\" (UID: \"1fd76610-c79c-4c82-8b29-9038aa0ac22c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557412 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1fd76610-c79c-4c82-8b29-9038aa0ac22c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-dpb7m\" (UID: \"1fd76610-c79c-4c82-8b29-9038aa0ac22c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557459 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-image-import-ca\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557478 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0a22c90f-806d-43a3-b68a-321f5763b64c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-d4kcb\" (UID: \"0a22c90f-806d-43a3-b68a-321f5763b64c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557516 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557535 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bab12393-6d6a-459b-8054-0934d99ffd0f-node-pullsecrets\") pod 
\"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557559 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0a22c90f-806d-43a3-b68a-321f5763b64c-proxy-tls\") pod \"machine-config-operator-74547568cd-d4kcb\" (UID: \"0a22c90f-806d-43a3-b68a-321f5763b64c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557597 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0416c999-c003-42ff-8c6e-30357b23975d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dkqzc\" (UID: \"0416c999-c003-42ff-8c6e-30357b23975d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557620 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ba3ade7b-ad1c-4f60-9cde-f7a198336912-trusted-ca\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.557697 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0126cc59-9259-43c1-91ea-cdc05047bbee-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6nbbk\" (UID: \"0126cc59-9259-43c1-91ea-cdc05047bbee\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:01 crc kubenswrapper[4822]: E1201 06:53:01.558145 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.058128242 +0000 UTC m=+137.378935938 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.560123 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.578872 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.599937 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.618480 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.638948 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.658530 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.658897 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:01 crc kubenswrapper[4822]: E1201 06:53:01.659089 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.159058227 +0000 UTC m=+137.479865913 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.659218 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca838455-398e-4c35-844e-201e3fe65b5d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fwgk6\" (UID: \"ca838455-398e-4c35-844e-201e3fe65b5d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.659298 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92434bb5-42fe-4d2e-8a9e-0517b46c6774-config\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.659379 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a2a429db-67df-45d5-81d0-8077eb96b9e4-node-bootstrap-token\") pod \"machine-config-server-9lnf6\" (UID: \"a2a429db-67df-45d5-81d0-8077eb96b9e4\") " pod="openshift-machine-config-operator/machine-config-server-9lnf6" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.659439 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5485f8d6-493f-4b42-88a6-363043c13a90-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5c4b8\" (UID: \"5485f8d6-493f-4b42-88a6-363043c13a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.659500 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-725wm\" (UniqueName: \"kubernetes.io/projected/ca838455-398e-4c35-844e-201e3fe65b5d-kube-api-access-725wm\") pod \"kube-storage-version-migrator-operator-b67b599dd-fwgk6\" (UID: \"ca838455-398e-4c35-844e-201e3fe65b5d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.659552 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-csi-data-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.659619 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbs8j\" (UniqueName: \"kubernetes.io/projected/b52e759c-d09d-42b3-b2c9-a9760a69ef5e-kube-api-access-zbs8j\") pod \"olm-operator-6b444d44fb-d64jc\" (UID: \"b52e759c-d09d-42b3-b2c9-a9760a69ef5e\") " 
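[Editor's aside: both nestedpendingoperations failures above share one root cause: no CSI driver named kubevirt.io.hostpath-provisioner has registered with this kubelet yet; the csi-hostpathplugin-6mnpz pod that provides it is itself only now getting its volumes mounted. A minimal sketch, assuming client-go, a reachable kubeconfig, and the node name crc from this log, of checking which drivers a node's CSINode object reports as registered:

    // csinode.go: list the CSI drivers registered on a node. A driver absent
    // from CSINode.Spec.Drivers yields exactly the "not found in the list of
    // registered CSI drivers" errors seen above.
    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        client, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        // "crc" is the node name appearing in every journal entry here.
        csiNode, err := client.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
        if err != nil {
            panic(err)
        }
        for _, d := range csiNode.Spec.Drivers {
            // Expect kubevirt.io.hostpath-provisioner once the plugin pod is up.
            fmt.Println("registered:", d.Name)
        }
    }

Once the plugin registers over the kubelet's plugin-registration socket, the driver appears in CSINode.Spec.Drivers and the deferred mount and unmount are retried. The log resumes below.]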
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.659654 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d0849c6-b1d6-4173-8a67-e97369b1cf56-config\") pod \"machine-approver-56656f9798-4dl8p\" (UID: \"1d0849c6-b1d6-4173-8a67-e97369b1cf56\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.659792 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmld8\" (UniqueName: \"kubernetes.io/projected/823bb36f-f372-441d-a483-85603bbac215-kube-api-access-xmld8\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.659896 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-service-ca\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.659947 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-audit\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.659980 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-registration-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660012 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-config\") pod \"route-controller-manager-6576b87f9c-9sr2j\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660045 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5485f8d6-493f-4b42-88a6-363043c13a90-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5c4b8\" (UID: \"5485f8d6-493f-4b42-88a6-363043c13a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660081 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ba3ade7b-ad1c-4f60-9cde-f7a198336912-installation-pull-secrets\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660119 4822 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-client-ca\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660396 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92434bb5-42fe-4d2e-8a9e-0517b46c6774-config\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660405 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f140c3a7-856d-4cda-87f0-cf7aff2c1c7d-config\") pod \"kube-apiserver-operator-766d6c64bb-5cxxd\" (UID: \"f140c3a7-856d-4cda-87f0-cf7aff2c1c7d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660470 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6577ee23-bf0c-4452-adfa-40e6b6b0678c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zrxm2\" (UID: \"6577ee23-bf0c-4452-adfa-40e6b6b0678c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660527 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/282e7604-ab9e-4255-a9a2-bc32c090ecd5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8rkch\" (UID: \"282e7604-ab9e-4255-a9a2-bc32c090ecd5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660559 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xt7nw\" (UniqueName: \"kubernetes.io/projected/837c103b-8351-4e7d-b00b-1b51640a1dd2-kube-api-access-xt7nw\") pod \"multus-admission-controller-857f4d67dd-s9g5t\" (UID: \"837c103b-8351-4e7d-b00b-1b51640a1dd2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s9g5t" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660624 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5c5s\" (UniqueName: \"kubernetes.io/projected/92434bb5-42fe-4d2e-8a9e-0517b46c6774-kube-api-access-t5c5s\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660653 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9fln\" (UniqueName: \"kubernetes.io/projected/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-kube-api-access-n9fln\") pod \"route-controller-manager-6576b87f9c-9sr2j\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660680 4822 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grlwl\" (UniqueName: \"kubernetes.io/projected/fee44223-9d68-4b77-807a-a806cd0e842f-kube-api-access-grlwl\") pod \"package-server-manager-789f6589d5-lw8s9\" (UID: \"fee44223-9d68-4b77-807a-a806cd0e842f\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.660963 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/837c103b-8351-4e7d-b00b-1b51640a1dd2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-s9g5t\" (UID: \"837c103b-8351-4e7d-b00b-1b51640a1dd2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s9g5t" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661018 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-audit\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661047 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9fafefcc-1420-4d16-a98e-56e31a3864ad-default-certificate\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661087 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1fd76610-c79c-4c82-8b29-9038aa0ac22c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-dpb7m\" (UID: \"1fd76610-c79c-4c82-8b29-9038aa0ac22c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661110 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fd504091-d7b4-4355-b17f-85eedfbb4731-apiservice-cert\") pod \"packageserver-d55dfcdfc-gwj6x\" (UID: \"fd504091-d7b4-4355-b17f-85eedfbb4731\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661134 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mxtw\" (UniqueName: \"kubernetes.io/projected/f3ee1c28-9784-4602-949b-c877f1c71adf-kube-api-access-2mxtw\") pod \"service-ca-operator-777779d784-ddprm\" (UID: \"f3ee1c28-9784-4602-949b-c877f1c71adf\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661173 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0a22c90f-806d-43a3-b68a-321f5763b64c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-d4kcb\" (UID: \"0a22c90f-806d-43a3-b68a-321f5763b64c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661240 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d0849c6-b1d6-4173-8a67-e97369b1cf56-auth-proxy-config\") pod \"machine-approver-56656f9798-4dl8p\" (UID: \"1d0849c6-b1d6-4173-8a67-e97369b1cf56\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661264 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661287 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bab12393-6d6a-459b-8054-0934d99ffd0f-node-pullsecrets\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661314 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-oauth-serving-cert\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661341 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1ef340e9-1eee-41e6-802f-4ee1768acf70-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4thh8\" (UID: \"1ef340e9-1eee-41e6-802f-4ee1768acf70\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661364 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/28bcb0b1-e632-4b21-8771-e41772fe28f8-trusted-ca\") pod \"console-operator-58897d9998-hpr4x\" (UID: \"28bcb0b1-e632-4b21-8771-e41772fe28f8\") " pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661389 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6577ee23-bf0c-4452-adfa-40e6b6b0678c-metrics-tls\") pod \"ingress-operator-5b745b69d9-zrxm2\" (UID: \"6577ee23-bf0c-4452-adfa-40e6b6b0678c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661425 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptqh7\" (UniqueName: \"kubernetes.io/projected/1645e90d-8ddd-4df9-82cd-e50edcac62ad-kube-api-access-ptqh7\") pod \"dns-operator-744455d44c-4j7rj\" (UID: \"1645e90d-8ddd-4df9-82cd-e50edcac62ad\") " pod="openshift-dns-operator/dns-operator-744455d44c-4j7rj" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661466 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92434bb5-42fe-4d2e-8a9e-0517b46c6774-serving-cert\") pod 
\"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661488 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-plugins-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661512 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5xqgh\" (UID: \"1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661535 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/282e7604-ab9e-4255-a9a2-bc32c090ecd5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8rkch\" (UID: \"282e7604-ab9e-4255-a9a2-bc32c090ecd5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661590 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b-proxy-tls\") pod \"machine-config-controller-84d6567774-5xqgh\" (UID: \"1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661639 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0416c999-c003-42ff-8c6e-30357b23975d-serving-cert\") pod \"openshift-config-operator-7777fb866f-dkqzc\" (UID: \"0416c999-c003-42ff-8c6e-30357b23975d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661645 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bab12393-6d6a-459b-8054-0934d99ffd0f-node-pullsecrets\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661668 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/47aa0319-23cc-454f-9715-4c1f61d8009d-profile-collector-cert\") pod \"catalog-operator-68c6474976-q4c6k\" (UID: \"47aa0319-23cc-454f-9715-4c1f61d8009d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661780 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0126cc59-9259-43c1-91ea-cdc05047bbee-images\") pod \"machine-api-operator-5694c8668f-6nbbk\" (UID: 
\"0126cc59-9259-43c1-91ea-cdc05047bbee\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661831 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0a22c90f-806d-43a3-b68a-321f5763b64c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-d4kcb\" (UID: \"0a22c90f-806d-43a3-b68a-321f5763b64c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661847 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9fafefcc-1420-4d16-a98e-56e31a3864ad-service-ca-bundle\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661939 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6v2v\" (UniqueName: \"kubernetes.io/projected/7ea02c2f-7f45-4a40-bedf-743a3adf7fb8-kube-api-access-h6v2v\") pod \"service-ca-9c57cc56f-dlh8x\" (UID: \"7ea02c2f-7f45-4a40-bedf-743a3adf7fb8\") " pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.661964 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-config\") pod \"route-controller-manager-6576b87f9c-9sr2j\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662025 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7ea02c2f-7f45-4a40-bedf-743a3adf7fb8-signing-key\") pod \"service-ca-9c57cc56f-dlh8x\" (UID: \"7ea02c2f-7f45-4a40-bedf-743a3adf7fb8\") " pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662077 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdm4l\" (UniqueName: \"kubernetes.io/projected/1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b-kube-api-access-fdm4l\") pod \"machine-config-controller-84d6567774-5xqgh\" (UID: \"1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662255 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdaf1821-e93f-499d-b3d2-dca3ac09fa8e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-spbn2\" (UID: \"fdaf1821-e93f-499d-b3d2-dca3ac09fa8e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662449 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-secret-volume\") pod \"collect-profiles-29409525-szbr7\" (UID: \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662516 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1fd76610-c79c-4c82-8b29-9038aa0ac22c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-dpb7m\" (UID: \"1fd76610-c79c-4c82-8b29-9038aa0ac22c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662522 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4570adbe-e705-4cbe-aecb-b27a3b930048-serving-cert\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662590 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jnkl\" (UniqueName: \"kubernetes.io/projected/a2a429db-67df-45d5-81d0-8077eb96b9e4-kube-api-access-5jnkl\") pod \"machine-config-server-9lnf6\" (UID: \"a2a429db-67df-45d5-81d0-8077eb96b9e4\") " pod="openshift-machine-config-operator/machine-config-server-9lnf6" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662619 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fdf3b502-078c-46b2-8d35-40ae4a29cbd1-cert\") pod \"ingress-canary-kcpds\" (UID: \"fdf3b502-078c-46b2-8d35-40ae4a29cbd1\") " pod="openshift-ingress-canary/ingress-canary-kcpds" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662654 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662677 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/bab12393-6d6a-459b-8054-0934d99ffd0f-encryption-config\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662707 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662751 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9fafefcc-1420-4d16-a98e-56e31a3864ad-stats-auth\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662780 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-mountpoint-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662803 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662818 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgmng\" (UniqueName: \"kubernetes.io/projected/bab12393-6d6a-459b-8054-0934d99ffd0f-kube-api-access-sgmng\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.662994 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4zlg\" (UniqueName: \"kubernetes.io/projected/0416c999-c003-42ff-8c6e-30357b23975d-kube-api-access-q4zlg\") pod \"openshift-config-operator-7777fb866f-dkqzc\" (UID: \"0416c999-c003-42ff-8c6e-30357b23975d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.663091 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b52e759c-d09d-42b3-b2c9-a9760a69ef5e-srv-cert\") pod \"olm-operator-6b444d44fb-d64jc\" (UID: \"b52e759c-d09d-42b3-b2c9-a9760a69ef5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" Dec 01 06:53:01 crc kubenswrapper[4822]: E1201 06:53:01.663112 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.16309484 +0000 UTC m=+137.483902616 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.663221 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1fd76610-c79c-4c82-8b29-9038aa0ac22c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-dpb7m\" (UID: \"1fd76610-c79c-4c82-8b29-9038aa0ac22c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.663320 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-config-volume\") pod \"collect-profiles-29409525-szbr7\" (UID: \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.663378 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7ea02c2f-7f45-4a40-bedf-743a3adf7fb8-signing-cabundle\") pod \"service-ca-9c57cc56f-dlh8x\" (UID: \"7ea02c2f-7f45-4a40-bedf-743a3adf7fb8\") " pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.663380 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0126cc59-9259-43c1-91ea-cdc05047bbee-images\") pod \"machine-api-operator-5694c8668f-6nbbk\" (UID: \"0126cc59-9259-43c1-91ea-cdc05047bbee\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.663487 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-socket-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.663547 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee914768-03f2-4378-9db1-02fdc1b8d048-config\") pod \"openshift-apiserver-operator-796bbdcf4f-c2l47\" (UID: \"ee914768-03f2-4378-9db1-02fdc1b8d048\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.663634 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/fee44223-9d68-4b77-807a-a806cd0e842f-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-lw8s9\" (UID: \"fee44223-9d68-4b77-807a-a806cd0e842f\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 
06:53:01.663672 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/282e7604-ab9e-4255-a9a2-bc32c090ecd5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8rkch\" (UID: \"282e7604-ab9e-4255-a9a2-bc32c090ecd5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.663720 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjtvr\" (UniqueName: \"kubernetes.io/projected/4570adbe-e705-4cbe-aecb-b27a3b930048-kube-api-access-gjtvr\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.664723 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/fd504091-d7b4-4355-b17f-85eedfbb4731-tmpfs\") pod \"packageserver-d55dfcdfc-gwj6x\" (UID: \"fd504091-d7b4-4355-b17f-85eedfbb4731\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.663832 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.664771 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwj4k\" (UniqueName: \"kubernetes.io/projected/9fafefcc-1420-4d16-a98e-56e31a3864ad-kube-api-access-cwj4k\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.664894 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr4k9\" (UniqueName: \"kubernetes.io/projected/1fd76610-c79c-4c82-8b29-9038aa0ac22c-kube-api-access-kr4k9\") pod \"cluster-image-registry-operator-dc59b4c8b-dpb7m\" (UID: \"1fd76610-c79c-4c82-8b29-9038aa0ac22c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.664936 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-config\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.664981 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/1d0849c6-b1d6-4173-8a67-e97369b1cf56-machine-approver-tls\") pod \"machine-approver-56656f9798-4dl8p\" (UID: \"1d0849c6-b1d6-4173-8a67-e97369b1cf56\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665010 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/fdaf1821-e93f-499d-b3d2-dca3ac09fa8e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-spbn2\" (UID: \"fdaf1821-e93f-499d-b3d2-dca3ac09fa8e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665040 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtlj5\" (UniqueName: \"kubernetes.io/projected/839d69e8-399b-4c30-b64f-893327a389e7-kube-api-access-xtlj5\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665066 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f140c3a7-856d-4cda-87f0-cf7aff2c1c7d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5cxxd\" (UID: \"f140c3a7-856d-4cda-87f0-cf7aff2c1c7d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665098 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nxrr\" (UniqueName: \"kubernetes.io/projected/0126cc59-9259-43c1-91ea-cdc05047bbee-kube-api-access-8nxrr\") pod \"machine-api-operator-5694c8668f-6nbbk\" (UID: \"0126cc59-9259-43c1-91ea-cdc05047bbee\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665130 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f140c3a7-856d-4cda-87f0-cf7aff2c1c7d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5cxxd\" (UID: \"f140c3a7-856d-4cda-87f0-cf7aff2c1c7d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665161 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc49q\" (UniqueName: \"kubernetes.io/projected/5485f8d6-493f-4b42-88a6-363043c13a90-kube-api-access-fc49q\") pod \"marketplace-operator-79b997595-5c4b8\" (UID: \"5485f8d6-493f-4b42-88a6-363043c13a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665197 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrn7l\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-kube-api-access-hrn7l\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665222 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-trusted-ca-bundle\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665245 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" 
(UniqueName: \"kubernetes.io/secret/9fafefcc-1420-4d16-a98e-56e31a3864ad-metrics-certs\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665272 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-serving-cert\") pod \"route-controller-manager-6576b87f9c-9sr2j\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665294 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwssj\" (UniqueName: \"kubernetes.io/projected/ee914768-03f2-4378-9db1-02fdc1b8d048-kube-api-access-xwssj\") pod \"openshift-apiserver-operator-796bbdcf4f-c2l47\" (UID: \"ee914768-03f2-4378-9db1-02fdc1b8d048\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665316 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/47aa0319-23cc-454f-9715-4c1f61d8009d-srv-cert\") pod \"catalog-operator-68c6474976-q4c6k\" (UID: \"47aa0319-23cc-454f-9715-4c1f61d8009d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665338 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0126cc59-9259-43c1-91ea-cdc05047bbee-config\") pod \"machine-api-operator-5694c8668f-6nbbk\" (UID: \"0126cc59-9259-43c1-91ea-cdc05047bbee\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665359 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1fd76610-c79c-4c82-8b29-9038aa0ac22c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-dpb7m\" (UID: \"1fd76610-c79c-4c82-8b29-9038aa0ac22c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665381 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3ee1c28-9784-4602-949b-c877f1c71adf-config\") pod \"service-ca-operator-777779d784-ddprm\" (UID: \"f3ee1c28-9784-4602-949b-c877f1c71adf\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665406 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxrwx\" (UniqueName: \"kubernetes.io/projected/47aa0319-23cc-454f-9715-4c1f61d8009d-kube-api-access-dxrwx\") pod \"catalog-operator-68c6474976-q4c6k\" (UID: \"47aa0319-23cc-454f-9715-4c1f61d8009d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665436 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-image-import-ca\") pod 
\"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665457 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f3ee1c28-9784-4602-949b-c877f1c71adf-serving-cert\") pod \"service-ca-operator-777779d784-ddprm\" (UID: \"f3ee1c28-9784-4602-949b-c877f1c71adf\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665506 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0a22c90f-806d-43a3-b68a-321f5763b64c-proxy-tls\") pod \"machine-config-operator-74547568cd-d4kcb\" (UID: \"0a22c90f-806d-43a3-b68a-321f5763b64c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665529 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ba3ade7b-ad1c-4f60-9cde-f7a198336912-trusted-ca\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665558 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0416c999-c003-42ff-8c6e-30357b23975d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dkqzc\" (UID: \"0416c999-c003-42ff-8c6e-30357b23975d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665610 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1645e90d-8ddd-4df9-82cd-e50edcac62ad-metrics-tls\") pod \"dns-operator-744455d44c-4j7rj\" (UID: \"1645e90d-8ddd-4df9-82cd-e50edcac62ad\") " pod="openshift-dns-operator/dns-operator-744455d44c-4j7rj" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665645 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6577ee23-bf0c-4452-adfa-40e6b6b0678c-trusted-ca\") pod \"ingress-operator-5b745b69d9-zrxm2\" (UID: \"6577ee23-bf0c-4452-adfa-40e6b6b0678c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665686 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/03c39fe9-7006-45af-8c8f-6f970b3a9656-metrics-tls\") pod \"dns-default-f87b4\" (UID: \"03c39fe9-7006-45af-8c8f-6f970b3a9656\") " pod="openshift-dns/dns-default-f87b4" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665903 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0126cc59-9259-43c1-91ea-cdc05047bbee-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6nbbk\" (UID: \"0126cc59-9259-43c1-91ea-cdc05047bbee\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665931 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee914768-03f2-4378-9db1-02fdc1b8d048-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-c2l47\" (UID: \"ee914768-03f2-4378-9db1-02fdc1b8d048\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665931 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee914768-03f2-4378-9db1-02fdc1b8d048-config\") pod \"openshift-apiserver-operator-796bbdcf4f-c2l47\" (UID: \"ee914768-03f2-4378-9db1-02fdc1b8d048\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665954 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-registry-tls\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.665965 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0416c999-c003-42ff-8c6e-30357b23975d-serving-cert\") pod \"openshift-config-operator-7777fb866f-dkqzc\" (UID: \"0416c999-c003-42ff-8c6e-30357b23975d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666012 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-client-ca\") pod \"route-controller-manager-6576b87f9c-9sr2j\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666039 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk59w\" (UniqueName: \"kubernetes.io/projected/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-kube-api-access-lk59w\") pod \"collect-profiles-29409525-szbr7\" (UID: \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666075 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fv72\" (UniqueName: \"kubernetes.io/projected/fdaf1821-e93f-499d-b3d2-dca3ac09fa8e-kube-api-access-7fv72\") pod \"openshift-controller-manager-operator-756b6f6bc6-spbn2\" (UID: \"fdaf1821-e93f-499d-b3d2-dca3ac09fa8e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666179 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/839d69e8-399b-4c30-b64f-893327a389e7-console-serving-cert\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666217 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fd504091-d7b4-4355-b17f-85eedfbb4731-webhook-cert\") pod \"packageserver-d55dfcdfc-gwj6x\" (UID: \"fd504091-d7b4-4355-b17f-85eedfbb4731\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666246 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pclc\" (UniqueName: \"kubernetes.io/projected/6577ee23-bf0c-4452-adfa-40e6b6b0678c-kube-api-access-7pclc\") pod \"ingress-operator-5b745b69d9-zrxm2\" (UID: \"6577ee23-bf0c-4452-adfa-40e6b6b0678c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666274 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5vmh\" (UniqueName: \"kubernetes.io/projected/03c39fe9-7006-45af-8c8f-6f970b3a9656-kube-api-access-t5vmh\") pod \"dns-default-f87b4\" (UID: \"03c39fe9-7006-45af-8c8f-6f970b3a9656\") " pod="openshift-dns/dns-default-f87b4"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666312 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-config\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666335 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqwjs\" (UniqueName: \"kubernetes.io/projected/28bcb0b1-e632-4b21-8771-e41772fe28f8-kube-api-access-vqwjs\") pod \"console-operator-58897d9998-hpr4x\" (UID: \"28bcb0b1-e632-4b21-8771-e41772fe28f8\") " pod="openshift-console-operator/console-operator-58897d9998-hpr4x"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666360 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bab12393-6d6a-459b-8054-0934d99ffd0f-serving-cert\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666381 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0a22c90f-806d-43a3-b68a-321f5763b64c-images\") pod \"machine-config-operator-74547568cd-d4kcb\" (UID: \"0a22c90f-806d-43a3-b68a-321f5763b64c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666407 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-etcd-serving-ca\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666428 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-console-config\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666456 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/28bcb0b1-e632-4b21-8771-e41772fe28f8-serving-cert\") pod \"console-operator-58897d9998-hpr4x\" (UID: \"28bcb0b1-e632-4b21-8771-e41772fe28f8\") " pod="openshift-console-operator/console-operator-58897d9998-hpr4x"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666505 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/03c39fe9-7006-45af-8c8f-6f970b3a9656-config-volume\") pod \"dns-default-f87b4\" (UID: \"03c39fe9-7006-45af-8c8f-6f970b3a9656\") " pod="openshift-dns/dns-default-f87b4"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666530 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ba3ade7b-ad1c-4f60-9cde-f7a198336912-ca-trust-extracted\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666712 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbztp\" (UniqueName: \"kubernetes.io/projected/fdf3b502-078c-46b2-8d35-40ae4a29cbd1-kube-api-access-fbztp\") pod \"ingress-canary-kcpds\" (UID: \"fdf3b502-078c-46b2-8d35-40ae4a29cbd1\") " pod="openshift-ingress-canary/ingress-canary-kcpds"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666748 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/92434bb5-42fe-4d2e-8a9e-0517b46c6774-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666771 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/92434bb5-42fe-4d2e-8a9e-0517b46c6774-service-ca-bundle\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666794 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca838455-398e-4c35-844e-201e3fe65b5d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fwgk6\" (UID: \"ca838455-398e-4c35-844e-201e3fe65b5d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666816 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bab12393-6d6a-459b-8054-0934d99ffd0f-audit-dir\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666846 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/839d69e8-399b-4c30-b64f-893327a389e7-console-oauth-config\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.667302 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ba3ade7b-ad1c-4f60-9cde-f7a198336912-ca-trust-extracted\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.667682 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-config\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.668165 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca838455-398e-4c35-844e-201e3fe65b5d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fwgk6\" (UID: \"ca838455-398e-4c35-844e-201e3fe65b5d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.668225 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-client-ca\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.668431 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-image-import-ca\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.669049 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0416c999-c003-42ff-8c6e-30357b23975d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dkqzc\" (UID: \"0416c999-c003-42ff-8c6e-30357b23975d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.669209 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bab12393-6d6a-459b-8054-0934d99ffd0f-serving-cert\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.669522 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0126cc59-9259-43c1-91ea-cdc05047bbee-config\") pod \"machine-api-operator-5694c8668f-6nbbk\" (UID: \"0126cc59-9259-43c1-91ea-cdc05047bbee\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.669824 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ba3ade7b-ad1c-4f60-9cde-f7a198336912-trusted-ca\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.666868 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b52e759c-d09d-42b3-b2c9-a9760a69ef5e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-d64jc\" (UID: \"b52e759c-d09d-42b3-b2c9-a9760a69ef5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.670059 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0a22c90f-806d-43a3-b68a-321f5763b64c-images\") pod \"machine-config-operator-74547568cd-d4kcb\" (UID: \"0a22c90f-806d-43a3-b68a-321f5763b64c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.670081 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/bab12393-6d6a-459b-8054-0934d99ffd0f-etcd-serving-ca\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.670150 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bab12393-6d6a-459b-8054-0934d99ffd0f-audit-dir\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.670838 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1fd76610-c79c-4c82-8b29-9038aa0ac22c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-dpb7m\" (UID: \"1fd76610-c79c-4c82-8b29-9038aa0ac22c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.671782 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-config\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.671892 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee914768-03f2-4378-9db1-02fdc1b8d048-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-c2l47\" (UID: \"ee914768-03f2-4378-9db1-02fdc1b8d048\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.671964 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bab12393-6d6a-459b-8054-0934d99ffd0f-etcd-client\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.672005 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a2a429db-67df-45d5-81d0-8077eb96b9e4-certs\") pod \"machine-config-server-9lnf6\" (UID: \"a2a429db-67df-45d5-81d0-8077eb96b9e4\") " pod="openshift-machine-config-operator/machine-config-server-9lnf6"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.672107 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44t7r\" (UniqueName: \"kubernetes.io/projected/1ef340e9-1eee-41e6-802f-4ee1768acf70-kube-api-access-44t7r\") pod \"cluster-samples-operator-665b6dd947-4thh8\" (UID: \"1ef340e9-1eee-41e6-802f-4ee1768acf70\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.672503 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-client-ca\") pod \"route-controller-manager-6576b87f9c-9sr2j\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.672532 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0a22c90f-806d-43a3-b68a-321f5763b64c-proxy-tls\") pod \"machine-config-operator-74547568cd-d4kcb\" (UID: \"0a22c90f-806d-43a3-b68a-321f5763b64c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.672698 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92434bb5-42fe-4d2e-8a9e-0517b46c6774-serving-cert\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.673190 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-registry-tls\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.673262 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/92434bb5-42fe-4d2e-8a9e-0517b46c6774-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc"
\"ca838455-398e-4c35-844e-201e3fe65b5d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.673430 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/92434bb5-42fe-4d2e-8a9e-0517b46c6774-service-ca-bundle\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.673512 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wcs7\" (UniqueName: \"kubernetes.io/projected/9170d425-7fae-40d9-aab0-7d2afbaa56e4-kube-api-access-5wcs7\") pod \"downloads-7954f5f757-k9qc2\" (UID: \"9170d425-7fae-40d9-aab0-7d2afbaa56e4\") " pod="openshift-console/downloads-7954f5f757-k9qc2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.673611 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ba3ade7b-ad1c-4f60-9cde-f7a198336912-registry-certificates\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.673677 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7bqw\" (UniqueName: \"kubernetes.io/projected/1d0849c6-b1d6-4173-8a67-e97369b1cf56-kube-api-access-g7bqw\") pod \"machine-approver-56656f9798-4dl8p\" (UID: \"1d0849c6-b1d6-4173-8a67-e97369b1cf56\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.673722 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkxlt\" (UniqueName: \"kubernetes.io/projected/27ab4099-8ed2-4115-92f7-07109c42430f-kube-api-access-mkxlt\") pod \"migrator-59844c95c7-9hgl8\" (UID: \"27ab4099-8ed2-4115-92f7-07109c42430f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9hgl8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.673792 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgcrk\" (UniqueName: \"kubernetes.io/projected/0a22c90f-806d-43a3-b68a-321f5763b64c-kube-api-access-cgcrk\") pod \"machine-config-operator-74547568cd-d4kcb\" (UID: \"0a22c90f-806d-43a3-b68a-321f5763b64c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.673902 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-bound-sa-token\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.674270 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpz9h\" (UniqueName: \"kubernetes.io/projected/fd504091-d7b4-4355-b17f-85eedfbb4731-kube-api-access-gpz9h\") pod \"packageserver-d55dfcdfc-gwj6x\" (UID: 
\"fd504091-d7b4-4355-b17f-85eedfbb4731\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.674309 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28bcb0b1-e632-4b21-8771-e41772fe28f8-config\") pod \"console-operator-58897d9998-hpr4x\" (UID: \"28bcb0b1-e632-4b21-8771-e41772fe28f8\") " pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.674325 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ba3ade7b-ad1c-4f60-9cde-f7a198336912-installation-pull-secrets\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.675141 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0126cc59-9259-43c1-91ea-cdc05047bbee-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6nbbk\" (UID: \"0126cc59-9259-43c1-91ea-cdc05047bbee\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.675206 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-serving-cert\") pod \"route-controller-manager-6576b87f9c-9sr2j\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.675291 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/bab12393-6d6a-459b-8054-0934d99ffd0f-encryption-config\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.675613 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ba3ade7b-ad1c-4f60-9cde-f7a198336912-registry-certificates\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.677926 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bab12393-6d6a-459b-8054-0934d99ffd0f-etcd-client\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.678980 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.680936 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4570adbe-e705-4cbe-aecb-b27a3b930048-serving-cert\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.699213 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.718301 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.739400 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.758504 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.775730 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:01 crc kubenswrapper[4822]: E1201 06:53:01.775787 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.275768994 +0000 UTC m=+137.596576690 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776199 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d0849c6-b1d6-4173-8a67-e97369b1cf56-auth-proxy-config\") pod \"machine-approver-56656f9798-4dl8p\" (UID: \"1d0849c6-b1d6-4173-8a67-e97369b1cf56\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776237 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-oauth-serving-cert\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776269 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/28bcb0b1-e632-4b21-8771-e41772fe28f8-trusted-ca\") pod \"console-operator-58897d9998-hpr4x\" (UID: \"28bcb0b1-e632-4b21-8771-e41772fe28f8\") " pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776301 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/6577ee23-bf0c-4452-adfa-40e6b6b0678c-metrics-tls\") pod \"ingress-operator-5b745b69d9-zrxm2\" (UID: \"6577ee23-bf0c-4452-adfa-40e6b6b0678c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776333 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptqh7\" (UniqueName: \"kubernetes.io/projected/1645e90d-8ddd-4df9-82cd-e50edcac62ad-kube-api-access-ptqh7\") pod \"dns-operator-744455d44c-4j7rj\" (UID: \"1645e90d-8ddd-4df9-82cd-e50edcac62ad\") " pod="openshift-dns-operator/dns-operator-744455d44c-4j7rj" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776360 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1ef340e9-1eee-41e6-802f-4ee1768acf70-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4thh8\" (UID: \"1ef340e9-1eee-41e6-802f-4ee1768acf70\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776382 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-plugins-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776406 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/282e7604-ab9e-4255-a9a2-bc32c090ecd5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8rkch\" (UID: \"282e7604-ab9e-4255-a9a2-bc32c090ecd5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776428 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b-proxy-tls\") pod \"machine-config-controller-84d6567774-5xqgh\" (UID: \"1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776450 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5xqgh\" (UID: \"1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776473 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/47aa0319-23cc-454f-9715-4c1f61d8009d-profile-collector-cert\") pod \"catalog-operator-68c6474976-q4c6k\" (UID: \"47aa0319-23cc-454f-9715-4c1f61d8009d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776499 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6v2v\" (UniqueName: \"kubernetes.io/projected/7ea02c2f-7f45-4a40-bedf-743a3adf7fb8-kube-api-access-h6v2v\") pod 
\"service-ca-9c57cc56f-dlh8x\" (UID: \"7ea02c2f-7f45-4a40-bedf-743a3adf7fb8\") " pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776523 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9fafefcc-1420-4d16-a98e-56e31a3864ad-service-ca-bundle\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776546 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7ea02c2f-7f45-4a40-bedf-743a3adf7fb8-signing-key\") pod \"service-ca-9c57cc56f-dlh8x\" (UID: \"7ea02c2f-7f45-4a40-bedf-743a3adf7fb8\") " pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776594 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdm4l\" (UniqueName: \"kubernetes.io/projected/1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b-kube-api-access-fdm4l\") pod \"machine-config-controller-84d6567774-5xqgh\" (UID: \"1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776616 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdaf1821-e93f-499d-b3d2-dca3ac09fa8e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-spbn2\" (UID: \"fdaf1821-e93f-499d-b3d2-dca3ac09fa8e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776648 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-secret-volume\") pod \"collect-profiles-29409525-szbr7\" (UID: \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776673 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fdf3b502-078c-46b2-8d35-40ae4a29cbd1-cert\") pod \"ingress-canary-kcpds\" (UID: \"fdf3b502-078c-46b2-8d35-40ae4a29cbd1\") " pod="openshift-ingress-canary/ingress-canary-kcpds" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776705 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jnkl\" (UniqueName: \"kubernetes.io/projected/a2a429db-67df-45d5-81d0-8077eb96b9e4-kube-api-access-5jnkl\") pod \"machine-config-server-9lnf6\" (UID: \"a2a429db-67df-45d5-81d0-8077eb96b9e4\") " pod="openshift-machine-config-operator/machine-config-server-9lnf6" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776732 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 
06:53:01.776753 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9fafefcc-1420-4d16-a98e-56e31a3864ad-stats-auth\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776793 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-mountpoint-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776822 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b52e759c-d09d-42b3-b2c9-a9760a69ef5e-srv-cert\") pod \"olm-operator-6b444d44fb-d64jc\" (UID: \"b52e759c-d09d-42b3-b2c9-a9760a69ef5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776858 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-config-volume\") pod \"collect-profiles-29409525-szbr7\" (UID: \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776880 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7ea02c2f-7f45-4a40-bedf-743a3adf7fb8-signing-cabundle\") pod \"service-ca-9c57cc56f-dlh8x\" (UID: \"7ea02c2f-7f45-4a40-bedf-743a3adf7fb8\") " pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776905 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-socket-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776928 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/fee44223-9d68-4b77-807a-a806cd0e842f-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-lw8s9\" (UID: \"fee44223-9d68-4b77-807a-a806cd0e842f\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776950 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/282e7604-ab9e-4255-a9a2-bc32c090ecd5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8rkch\" (UID: \"282e7604-ab9e-4255-a9a2-bc32c090ecd5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.776979 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/fd504091-d7b4-4355-b17f-85eedfbb4731-tmpfs\") pod \"packageserver-d55dfcdfc-gwj6x\" (UID: 
\"fd504091-d7b4-4355-b17f-85eedfbb4731\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777000 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwj4k\" (UniqueName: \"kubernetes.io/projected/9fafefcc-1420-4d16-a98e-56e31a3864ad-kube-api-access-cwj4k\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777032 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d0849c6-b1d6-4173-8a67-e97369b1cf56-auth-proxy-config\") pod \"machine-approver-56656f9798-4dl8p\" (UID: \"1d0849c6-b1d6-4173-8a67-e97369b1cf56\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777045 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/1d0849c6-b1d6-4173-8a67-e97369b1cf56-machine-approver-tls\") pod \"machine-approver-56656f9798-4dl8p\" (UID: \"1d0849c6-b1d6-4173-8a67-e97369b1cf56\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777067 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fdaf1821-e93f-499d-b3d2-dca3ac09fa8e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-spbn2\" (UID: \"fdaf1821-e93f-499d-b3d2-dca3ac09fa8e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777090 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtlj5\" (UniqueName: \"kubernetes.io/projected/839d69e8-399b-4c30-b64f-893327a389e7-kube-api-access-xtlj5\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777111 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f140c3a7-856d-4cda-87f0-cf7aff2c1c7d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5cxxd\" (UID: \"f140c3a7-856d-4cda-87f0-cf7aff2c1c7d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777148 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f140c3a7-856d-4cda-87f0-cf7aff2c1c7d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5cxxd\" (UID: \"f140c3a7-856d-4cda-87f0-cf7aff2c1c7d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777170 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc49q\" (UniqueName: \"kubernetes.io/projected/5485f8d6-493f-4b42-88a6-363043c13a90-kube-api-access-fc49q\") pod \"marketplace-operator-79b997595-5c4b8\" (UID: \"5485f8d6-493f-4b42-88a6-363043c13a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" 
Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777199 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-trusted-ca-bundle\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777222 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fafefcc-1420-4d16-a98e-56e31a3864ad-metrics-certs\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777288 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/47aa0319-23cc-454f-9715-4c1f61d8009d-srv-cert\") pod \"catalog-operator-68c6474976-q4c6k\" (UID: \"47aa0319-23cc-454f-9715-4c1f61d8009d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777318 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3ee1c28-9784-4602-949b-c877f1c71adf-config\") pod \"service-ca-operator-777779d784-ddprm\" (UID: \"f3ee1c28-9784-4602-949b-c877f1c71adf\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777326 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-plugins-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777340 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxrwx\" (UniqueName: \"kubernetes.io/projected/47aa0319-23cc-454f-9715-4c1f61d8009d-kube-api-access-dxrwx\") pod \"catalog-operator-68c6474976-q4c6k\" (UID: \"47aa0319-23cc-454f-9715-4c1f61d8009d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777538 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f3ee1c28-9784-4602-949b-c877f1c71adf-serving-cert\") pod \"service-ca-operator-777779d784-ddprm\" (UID: \"f3ee1c28-9784-4602-949b-c877f1c71adf\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777644 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6577ee23-bf0c-4452-adfa-40e6b6b0678c-trusted-ca\") pod \"ingress-operator-5b745b69d9-zrxm2\" (UID: \"6577ee23-bf0c-4452-adfa-40e6b6b0678c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777701 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/03c39fe9-7006-45af-8c8f-6f970b3a9656-metrics-tls\") pod \"dns-default-f87b4\" (UID: 
\"03c39fe9-7006-45af-8c8f-6f970b3a9656\") " pod="openshift-dns/dns-default-f87b4" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777738 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1645e90d-8ddd-4df9-82cd-e50edcac62ad-metrics-tls\") pod \"dns-operator-744455d44c-4j7rj\" (UID: \"1645e90d-8ddd-4df9-82cd-e50edcac62ad\") " pod="openshift-dns-operator/dns-operator-744455d44c-4j7rj" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777788 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk59w\" (UniqueName: \"kubernetes.io/projected/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-kube-api-access-lk59w\") pod \"collect-profiles-29409525-szbr7\" (UID: \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777842 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/839d69e8-399b-4c30-b64f-893327a389e7-console-serving-cert\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777873 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fd504091-d7b4-4355-b17f-85eedfbb4731-webhook-cert\") pod \"packageserver-d55dfcdfc-gwj6x\" (UID: \"fd504091-d7b4-4355-b17f-85eedfbb4731\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777905 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pclc\" (UniqueName: \"kubernetes.io/projected/6577ee23-bf0c-4452-adfa-40e6b6b0678c-kube-api-access-7pclc\") pod \"ingress-operator-5b745b69d9-zrxm2\" (UID: \"6577ee23-bf0c-4452-adfa-40e6b6b0678c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777943 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fv72\" (UniqueName: \"kubernetes.io/projected/fdaf1821-e93f-499d-b3d2-dca3ac09fa8e-kube-api-access-7fv72\") pod \"openshift-controller-manager-operator-756b6f6bc6-spbn2\" (UID: \"fdaf1821-e93f-499d-b3d2-dca3ac09fa8e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.777977 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5vmh\" (UniqueName: \"kubernetes.io/projected/03c39fe9-7006-45af-8c8f-6f970b3a9656-kube-api-access-t5vmh\") pod \"dns-default-f87b4\" (UID: \"03c39fe9-7006-45af-8c8f-6f970b3a9656\") " pod="openshift-dns/dns-default-f87b4" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778012 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqwjs\" (UniqueName: \"kubernetes.io/projected/28bcb0b1-e632-4b21-8771-e41772fe28f8-kube-api-access-vqwjs\") pod \"console-operator-58897d9998-hpr4x\" (UID: \"28bcb0b1-e632-4b21-8771-e41772fe28f8\") " pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778046 4822 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-console-config\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778080 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/28bcb0b1-e632-4b21-8771-e41772fe28f8-serving-cert\") pod \"console-operator-58897d9998-hpr4x\" (UID: \"28bcb0b1-e632-4b21-8771-e41772fe28f8\") " pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778118 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/03c39fe9-7006-45af-8c8f-6f970b3a9656-config-volume\") pod \"dns-default-f87b4\" (UID: \"03c39fe9-7006-45af-8c8f-6f970b3a9656\") " pod="openshift-dns/dns-default-f87b4" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778152 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbztp\" (UniqueName: \"kubernetes.io/projected/fdf3b502-078c-46b2-8d35-40ae4a29cbd1-kube-api-access-fbztp\") pod \"ingress-canary-kcpds\" (UID: \"fdf3b502-078c-46b2-8d35-40ae4a29cbd1\") " pod="openshift-ingress-canary/ingress-canary-kcpds" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778188 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b52e759c-d09d-42b3-b2c9-a9760a69ef5e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-d64jc\" (UID: \"b52e759c-d09d-42b3-b2c9-a9760a69ef5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778222 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a2a429db-67df-45d5-81d0-8077eb96b9e4-certs\") pod \"machine-config-server-9lnf6\" (UID: \"a2a429db-67df-45d5-81d0-8077eb96b9e4\") " pod="openshift-machine-config-operator/machine-config-server-9lnf6" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778256 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/839d69e8-399b-4c30-b64f-893327a389e7-console-oauth-config\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778303 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44t7r\" (UniqueName: \"kubernetes.io/projected/1ef340e9-1eee-41e6-802f-4ee1768acf70-kube-api-access-44t7r\") pod \"cluster-samples-operator-665b6dd947-4thh8\" (UID: \"1ef340e9-1eee-41e6-802f-4ee1768acf70\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778334 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wcs7\" (UniqueName: \"kubernetes.io/projected/9170d425-7fae-40d9-aab0-7d2afbaa56e4-kube-api-access-5wcs7\") pod \"downloads-7954f5f757-k9qc2\" (UID: \"9170d425-7fae-40d9-aab0-7d2afbaa56e4\") " pod="openshift-console/downloads-7954f5f757-k9qc2" Dec 
01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778375 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7bqw\" (UniqueName: \"kubernetes.io/projected/1d0849c6-b1d6-4173-8a67-e97369b1cf56-kube-api-access-g7bqw\") pod \"machine-approver-56656f9798-4dl8p\" (UID: \"1d0849c6-b1d6-4173-8a67-e97369b1cf56\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778413 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkxlt\" (UniqueName: \"kubernetes.io/projected/27ab4099-8ed2-4115-92f7-07109c42430f-kube-api-access-mkxlt\") pod \"migrator-59844c95c7-9hgl8\" (UID: \"27ab4099-8ed2-4115-92f7-07109c42430f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9hgl8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778469 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpz9h\" (UniqueName: \"kubernetes.io/projected/fd504091-d7b4-4355-b17f-85eedfbb4731-kube-api-access-gpz9h\") pod \"packageserver-d55dfcdfc-gwj6x\" (UID: \"fd504091-d7b4-4355-b17f-85eedfbb4731\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778507 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28bcb0b1-e632-4b21-8771-e41772fe28f8-config\") pod \"console-operator-58897d9998-hpr4x\" (UID: \"28bcb0b1-e632-4b21-8771-e41772fe28f8\") " pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778554 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5485f8d6-493f-4b42-88a6-363043c13a90-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5c4b8\" (UID: \"5485f8d6-493f-4b42-88a6-363043c13a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778612 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a2a429db-67df-45d5-81d0-8077eb96b9e4-node-bootstrap-token\") pod \"machine-config-server-9lnf6\" (UID: \"a2a429db-67df-45d5-81d0-8077eb96b9e4\") " pod="openshift-machine-config-operator/machine-config-server-9lnf6" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778634 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-csi-data-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778659 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbs8j\" (UniqueName: \"kubernetes.io/projected/b52e759c-d09d-42b3-b2c9-a9760a69ef5e-kube-api-access-zbs8j\") pod \"olm-operator-6b444d44fb-d64jc\" (UID: \"b52e759c-d09d-42b3-b2c9-a9760a69ef5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778695 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1d0849c6-b1d6-4173-8a67-e97369b1cf56-config\") pod \"machine-approver-56656f9798-4dl8p\" (UID: \"1d0849c6-b1d6-4173-8a67-e97369b1cf56\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778730 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-service-ca\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778765 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-registration-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778797 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmld8\" (UniqueName: \"kubernetes.io/projected/823bb36f-f372-441d-a483-85603bbac215-kube-api-access-xmld8\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778832 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5485f8d6-493f-4b42-88a6-363043c13a90-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5c4b8\" (UID: \"5485f8d6-493f-4b42-88a6-363043c13a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778863 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f140c3a7-856d-4cda-87f0-cf7aff2c1c7d-config\") pod \"kube-apiserver-operator-766d6c64bb-5cxxd\" (UID: \"f140c3a7-856d-4cda-87f0-cf7aff2c1c7d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778885 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6577ee23-bf0c-4452-adfa-40e6b6b0678c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zrxm2\" (UID: \"6577ee23-bf0c-4452-adfa-40e6b6b0678c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778909 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/282e7604-ab9e-4255-a9a2-bc32c090ecd5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8rkch\" (UID: \"282e7604-ab9e-4255-a9a2-bc32c090ecd5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778936 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xt7nw\" (UniqueName: \"kubernetes.io/projected/837c103b-8351-4e7d-b00b-1b51640a1dd2-kube-api-access-xt7nw\") pod \"multus-admission-controller-857f4d67dd-s9g5t\" (UID: \"837c103b-8351-4e7d-b00b-1b51640a1dd2\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-s9g5t" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778939 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5xqgh\" (UID: \"1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778988 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/837c103b-8351-4e7d-b00b-1b51640a1dd2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-s9g5t\" (UID: \"837c103b-8351-4e7d-b00b-1b51640a1dd2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s9g5t" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.779016 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9fafefcc-1420-4d16-a98e-56e31a3864ad-default-certificate\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.779062 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grlwl\" (UniqueName: \"kubernetes.io/projected/fee44223-9d68-4b77-807a-a806cd0e842f-kube-api-access-grlwl\") pod \"package-server-manager-789f6589d5-lw8s9\" (UID: \"fee44223-9d68-4b77-807a-a806cd0e842f\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.779110 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mxtw\" (UniqueName: \"kubernetes.io/projected/f3ee1c28-9784-4602-949b-c877f1c71adf-kube-api-access-2mxtw\") pod \"service-ca-operator-777779d784-ddprm\" (UID: \"f3ee1c28-9784-4602-949b-c877f1c71adf\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.779145 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fd504091-d7b4-4355-b17f-85eedfbb4731-apiservice-cert\") pod \"packageserver-d55dfcdfc-gwj6x\" (UID: \"fd504091-d7b4-4355-b17f-85eedfbb4731\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.779222 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-oauth-serving-cert\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.779660 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdaf1821-e93f-499d-b3d2-dca3ac09fa8e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-spbn2\" (UID: \"fdaf1821-e93f-499d-b3d2-dca3ac09fa8e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" Dec 01 06:53:01 crc kubenswrapper[4822]: E1201 06:53:01.779723 
4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.279457807 +0000 UTC m=+137.600265663 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.779895 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/28bcb0b1-e632-4b21-8771-e41772fe28f8-trusted-ca\") pod \"console-operator-58897d9998-hpr4x\" (UID: \"28bcb0b1-e632-4b21-8771-e41772fe28f8\") " pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.780225 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9fafefcc-1420-4d16-a98e-56e31a3864ad-service-ca-bundle\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.780242 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6577ee23-bf0c-4452-adfa-40e6b6b0678c-trusted-ca\") pod \"ingress-operator-5b745b69d9-zrxm2\" (UID: \"6577ee23-bf0c-4452-adfa-40e6b6b0678c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.780675 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/fd504091-d7b4-4355-b17f-85eedfbb4731-tmpfs\") pod \"packageserver-d55dfcdfc-gwj6x\" (UID: \"fd504091-d7b4-4355-b17f-85eedfbb4731\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.780681 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-mountpoint-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.780797 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-csi-data-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.780905 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-socket-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.778663 4822 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/282e7604-ab9e-4255-a9a2-bc32c090ecd5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8rkch\" (UID: \"282e7604-ab9e-4255-a9a2-bc32c090ecd5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.781673 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d0849c6-b1d6-4173-8a67-e97369b1cf56-config\") pod \"machine-approver-56656f9798-4dl8p\" (UID: \"1d0849c6-b1d6-4173-8a67-e97369b1cf56\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.782337 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3ee1c28-9784-4602-949b-c877f1c71adf-config\") pod \"service-ca-operator-777779d784-ddprm\" (UID: \"f3ee1c28-9784-4602-949b-c877f1c71adf\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.783246 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28bcb0b1-e632-4b21-8771-e41772fe28f8-config\") pod \"console-operator-58897d9998-hpr4x\" (UID: \"28bcb0b1-e632-4b21-8771-e41772fe28f8\") " pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.783054 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1ef340e9-1eee-41e6-802f-4ee1768acf70-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4thh8\" (UID: \"1ef340e9-1eee-41e6-802f-4ee1768acf70\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.783157 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/823bb36f-f372-441d-a483-85603bbac215-registration-dir\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.783235 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f140c3a7-856d-4cda-87f0-cf7aff2c1c7d-config\") pod \"kube-apiserver-operator-766d6c64bb-5cxxd\" (UID: \"f140c3a7-856d-4cda-87f0-cf7aff2c1c7d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.782558 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b-proxy-tls\") pod \"machine-config-controller-84d6567774-5xqgh\" (UID: \"1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.783804 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6577ee23-bf0c-4452-adfa-40e6b6b0678c-metrics-tls\") pod \"ingress-operator-5b745b69d9-zrxm2\" (UID: \"6577ee23-bf0c-4452-adfa-40e6b6b0678c\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.784918 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-service-ca\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.786074 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/839d69e8-399b-4c30-b64f-893327a389e7-console-serving-cert\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.786343 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/837c103b-8351-4e7d-b00b-1b51640a1dd2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-s9g5t\" (UID: \"837c103b-8351-4e7d-b00b-1b51640a1dd2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s9g5t" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.786414 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/28bcb0b1-e632-4b21-8771-e41772fe28f8-serving-cert\") pod \"console-operator-58897d9998-hpr4x\" (UID: \"28bcb0b1-e632-4b21-8771-e41772fe28f8\") " pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.786805 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f3ee1c28-9784-4602-949b-c877f1c71adf-serving-cert\") pod \"service-ca-operator-777779d784-ddprm\" (UID: \"f3ee1c28-9784-4602-949b-c877f1c71adf\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.788183 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/839d69e8-399b-4c30-b64f-893327a389e7-console-oauth-config\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.788533 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/1d0849c6-b1d6-4173-8a67-e97369b1cf56-machine-approver-tls\") pod \"machine-approver-56656f9798-4dl8p\" (UID: \"1d0849c6-b1d6-4173-8a67-e97369b1cf56\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.789862 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-trusted-ca-bundle\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.789992 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fdaf1821-e93f-499d-b3d2-dca3ac09fa8e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-spbn2\" 
(UID: \"fdaf1821-e93f-499d-b3d2-dca3ac09fa8e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.791493 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f140c3a7-856d-4cda-87f0-cf7aff2c1c7d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5cxxd\" (UID: \"f140c3a7-856d-4cda-87f0-cf7aff2c1c7d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.791532 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/282e7604-ab9e-4255-a9a2-bc32c090ecd5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8rkch\" (UID: \"282e7604-ab9e-4255-a9a2-bc32c090ecd5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.791542 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1645e90d-8ddd-4df9-82cd-e50edcac62ad-metrics-tls\") pod \"dns-operator-744455d44c-4j7rj\" (UID: \"1645e90d-8ddd-4df9-82cd-e50edcac62ad\") " pod="openshift-dns-operator/dns-operator-744455d44c-4j7rj" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.791881 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9fafefcc-1420-4d16-a98e-56e31a3864ad-metrics-certs\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.791886 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fd504091-d7b4-4355-b17f-85eedfbb4731-apiservice-cert\") pod \"packageserver-d55dfcdfc-gwj6x\" (UID: \"fd504091-d7b4-4355-b17f-85eedfbb4731\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.792685 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9fafefcc-1420-4d16-a98e-56e31a3864ad-stats-auth\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.797445 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9fafefcc-1420-4d16-a98e-56e31a3864ad-default-certificate\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.797799 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.798451 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fd504091-d7b4-4355-b17f-85eedfbb4731-webhook-cert\") pod \"packageserver-d55dfcdfc-gwj6x\" (UID: \"fd504091-d7b4-4355-b17f-85eedfbb4731\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.800062 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.803008 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-console-config\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.804474 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5485f8d6-493f-4b42-88a6-363043c13a90-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5c4b8\" (UID: \"5485f8d6-493f-4b42-88a6-363043c13a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.809889 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5485f8d6-493f-4b42-88a6-363043c13a90-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5c4b8\" (UID: \"5485f8d6-493f-4b42-88a6-363043c13a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.819543 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.839628 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.860371 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.871411 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/fee44223-9d68-4b77-807a-a806cd0e842f-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-lw8s9\" (UID: \"fee44223-9d68-4b77-807a-a806cd0e842f\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.879068 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.881171 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:01 crc kubenswrapper[4822]: E1201 06:53:01.881382 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.381355491 +0000 UTC m=+137.702163187 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.882006 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.882026 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-config-volume\") pod \"collect-profiles-29409525-szbr7\" (UID: \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" Dec 01 06:53:01 crc kubenswrapper[4822]: E1201 06:53:01.882378 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.382358982 +0000 UTC m=+137.703166678 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.899407 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.919618 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.927769 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b52e759c-d09d-42b3-b2c9-a9760a69ef5e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-d64jc\" (UID: \"b52e759c-d09d-42b3-b2c9-a9760a69ef5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.933460 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/47aa0319-23cc-454f-9715-4c1f61d8009d-profile-collector-cert\") pod \"catalog-operator-68c6474976-q4c6k\" (UID: \"47aa0319-23cc-454f-9715-4c1f61d8009d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.934258 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-secret-volume\") pod \"collect-profiles-29409525-szbr7\" (UID: \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.940084 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.947939 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/47aa0319-23cc-454f-9715-4c1f61d8009d-srv-cert\") pod \"catalog-operator-68c6474976-q4c6k\" (UID: \"47aa0319-23cc-454f-9715-4c1f61d8009d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.949803 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.949908 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.950101 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.958757 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.965044 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b52e759c-d09d-42b3-b2c9-a9760a69ef5e-srv-cert\") pod \"olm-operator-6b444d44fb-d64jc\" (UID: \"b52e759c-d09d-42b3-b2c9-a9760a69ef5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.979047 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.983465 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:01 crc kubenswrapper[4822]: E1201 06:53:01.983756 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.48372046 +0000 UTC m=+137.804528176 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.983939 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:01 crc kubenswrapper[4822]: E1201 06:53:01.984467 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.484444632 +0000 UTC m=+137.805252358 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:01 crc kubenswrapper[4822]: I1201 06:53:01.999388 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.002421 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7ea02c2f-7f45-4a40-bedf-743a3adf7fb8-signing-cabundle\") pod \"service-ca-9c57cc56f-dlh8x\" (UID: \"7ea02c2f-7f45-4a40-bedf-743a3adf7fb8\") " pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.020218 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.032208 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7ea02c2f-7f45-4a40-bedf-743a3adf7fb8-signing-key\") pod \"service-ca-9c57cc56f-dlh8x\" (UID: \"7ea02c2f-7f45-4a40-bedf-743a3adf7fb8\") " pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.040230 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.058659 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.078828 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.084922 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:02 crc kubenswrapper[4822]: E1201 06:53:02.085687 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.585671406 +0000 UTC m=+137.906479092 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.098532 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.118741 4822 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.139358 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.159528 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.179834 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.186835 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:02 crc kubenswrapper[4822]: E1201 06:53:02.187290 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.687263951 +0000 UTC m=+138.008071667 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.199408 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.213037 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fdf3b502-078c-46b2-8d35-40ae4a29cbd1-cert\") pod \"ingress-canary-kcpds\" (UID: \"fdf3b502-078c-46b2-8d35-40ae4a29cbd1\") " pod="openshift-ingress-canary/ingress-canary-kcpds" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.244881 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e6164df-7beb-4da5-8853-a0785999d30c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9lk5l\" (UID: \"0e6164df-7beb-4da5-8853-a0785999d30c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.255324 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h97kh\" (UniqueName: \"kubernetes.io/projected/af89d20b-5709-4b8c-ade4-16f111487525-kube-api-access-h97kh\") pod \"apiserver-7bbb656c7d-ttrnv\" (UID: \"af89d20b-5709-4b8c-ade4-16f111487525\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.280123 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.285226 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a2a429db-67df-45d5-81d0-8077eb96b9e4-node-bootstrap-token\") pod \"machine-config-server-9lnf6\" (UID: \"a2a429db-67df-45d5-81d0-8077eb96b9e4\") " pod="openshift-machine-config-operator/machine-config-server-9lnf6" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.287822 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4k2f\" (UniqueName: \"kubernetes.io/projected/fffc4838-3f04-4867-b948-b40f642203de-kube-api-access-f4k2f\") pod \"oauth-openshift-558db77b4-jq95c\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.288154 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:02 crc kubenswrapper[4822]: E1201 06:53:02.288268 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-01 06:53:02.788244887 +0000 UTC m=+138.109052593 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.288455 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:02 crc kubenswrapper[4822]: E1201 06:53:02.288972 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.788960139 +0000 UTC m=+138.109767825 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.312389 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wh8v\" (UniqueName: \"kubernetes.io/projected/e411fb84-4e27-4611-80b5-9fd52e71441e-kube-api-access-5wh8v\") pod \"control-plane-machine-set-operator-78cbb6b69f-qhchn\" (UID: \"e411fb84-4e27-4611-80b5-9fd52e71441e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.318569 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.318818 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.322544 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/03c39fe9-7006-45af-8c8f-6f970b3a9656-config-volume\") pod \"dns-default-f87b4\" (UID: \"03c39fe9-7006-45af-8c8f-6f970b3a9656\") " pod="openshift-dns/dns-default-f87b4" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.339336 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.341865 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.344517 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a2a429db-67df-45d5-81d0-8077eb96b9e4-certs\") pod \"machine-config-server-9lnf6\" (UID: \"a2a429db-67df-45d5-81d0-8077eb96b9e4\") " pod="openshift-machine-config-operator/machine-config-server-9lnf6" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.359754 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.380345 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.390169 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:02 crc kubenswrapper[4822]: E1201 06:53:02.390338 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.890316696 +0000 UTC m=+138.211124382 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.390613 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:02 crc kubenswrapper[4822]: E1201 06:53:02.390919 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.890909615 +0000 UTC m=+138.211717301 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.399754 4822 request.go:700] Waited for 1.849662641s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/secrets?fieldSelector=metadata.name%3Ddns-default-metrics-tls&limit=500&resourceVersion=0 Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.401538 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.411126 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.413713 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/03c39fe9-7006-45af-8c8f-6f970b3a9656-metrics-tls\") pod \"dns-default-f87b4\" (UID: \"03c39fe9-7006-45af-8c8f-6f970b3a9656\") " pod="openshift-dns/dns-default-f87b4" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.437488 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9c5f\" (UniqueName: \"kubernetes.io/projected/c0c87798-bf92-4257-95df-a5aa1c305994-kube-api-access-l9c5f\") pod \"etcd-operator-b45778765-j7hbz\" (UID: \"c0c87798-bf92-4257-95df-a5aa1c305994\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.462027 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.479040 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.492235 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:02 crc kubenswrapper[4822]: E1201 06:53:02.493258 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:02.993233032 +0000 UTC m=+138.314040738 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.495735 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.517197 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-725wm\" (UniqueName: \"kubernetes.io/projected/ca838455-398e-4c35-844e-201e3fe65b5d-kube-api-access-725wm\") pod \"kube-storage-version-migrator-operator-b67b599dd-fwgk6\" (UID: \"ca838455-398e-4c35-844e-201e3fe65b5d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.533162 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5c5s\" (UniqueName: \"kubernetes.io/projected/92434bb5-42fe-4d2e-8a9e-0517b46c6774-kube-api-access-t5c5s\") pod \"authentication-operator-69f744f599-j6lgc\" (UID: \"92434bb5-42fe-4d2e-8a9e-0517b46c6774\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.553544 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9fln\" (UniqueName: \"kubernetes.io/projected/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-kube-api-access-n9fln\") pod \"route-controller-manager-6576b87f9c-9sr2j\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.554232 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.574920 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgmng\" (UniqueName: \"kubernetes.io/projected/bab12393-6d6a-459b-8054-0934d99ffd0f-kube-api-access-sgmng\") pod \"apiserver-76f77b778f-t4mcw\" (UID: \"bab12393-6d6a-459b-8054-0934d99ffd0f\") " pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.575758 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn"] Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.585379 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" Dec 01 06:53:02 crc kubenswrapper[4822]: W1201 06:53:02.586321 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode411fb84_4e27_4611_80b5_9fd52e71441e.slice/crio-61026743f54816a433e3232490e62b533da49e58c91c129d25501a09b092ef46 WatchSource:0}: Error finding container 61026743f54816a433e3232490e62b533da49e58c91c129d25501a09b092ef46: Status 404 returned error can't find the container with id 61026743f54816a433e3232490e62b533da49e58c91c129d25501a09b092ef46 Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.594407 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:02 crc kubenswrapper[4822]: E1201 06:53:02.594925 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:03.094772245 +0000 UTC m=+138.415579931 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.596609 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4zlg\" (UniqueName: \"kubernetes.io/projected/0416c999-c003-42ff-8c6e-30357b23975d-kube-api-access-q4zlg\") pod \"openshift-config-operator-7777fb866f-dkqzc\" (UID: \"0416c999-c003-42ff-8c6e-30357b23975d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.603499 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.615879 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjtvr\" (UniqueName: \"kubernetes.io/projected/4570adbe-e705-4cbe-aecb-b27a3b930048-kube-api-access-gjtvr\") pod \"controller-manager-879f6c89f-nmx5d\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.624167 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-jq95c"] Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.634622 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr4k9\" (UniqueName: \"kubernetes.io/projected/1fd76610-c79c-4c82-8b29-9038aa0ac22c-kube-api-access-kr4k9\") pod \"cluster-image-registry-operator-dc59b4c8b-dpb7m\" (UID: \"1fd76610-c79c-4c82-8b29-9038aa0ac22c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.654437 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nxrr\" (UniqueName: \"kubernetes.io/projected/0126cc59-9259-43c1-91ea-cdc05047bbee-kube-api-access-8nxrr\") pod \"machine-api-operator-5694c8668f-6nbbk\" (UID: \"0126cc59-9259-43c1-91ea-cdc05047bbee\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.663724 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l"] Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.683627 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrn7l\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-kube-api-access-hrn7l\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.684812 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.695196 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:02 crc kubenswrapper[4822]: E1201 06:53:02.695715 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:03.19569963 +0000 UTC m=+138.516507316 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.698424 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn" event={"ID":"e411fb84-4e27-4611-80b5-9fd52e71441e","Type":"ContainerStarted","Data":"61026743f54816a433e3232490e62b533da49e58c91c129d25501a09b092ef46"} Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.699297 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwssj\" (UniqueName: \"kubernetes.io/projected/ee914768-03f2-4378-9db1-02fdc1b8d048-kube-api-access-xwssj\") pod \"openshift-apiserver-operator-796bbdcf4f-c2l47\" (UID: \"ee914768-03f2-4378-9db1-02fdc1b8d048\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.704226 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.714078 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1fd76610-c79c-4c82-8b29-9038aa0ac22c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-dpb7m\" (UID: \"1fd76610-c79c-4c82-8b29-9038aa0ac22c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.731653 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.749387 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgcrk\" (UniqueName: \"kubernetes.io/projected/0a22c90f-806d-43a3-b68a-321f5763b64c-kube-api-access-cgcrk\") pod \"machine-config-operator-74547568cd-d4kcb\" (UID: \"0a22c90f-806d-43a3-b68a-321f5763b64c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.754110 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv"] Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.754374 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.756107 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-bound-sa-token\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.768470 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-j7hbz"] Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.773385 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6v2v\" (UniqueName: \"kubernetes.io/projected/7ea02c2f-7f45-4a40-bedf-743a3adf7fb8-kube-api-access-h6v2v\") pod \"service-ca-9c57cc56f-dlh8x\" (UID: \"7ea02c2f-7f45-4a40-bedf-743a3adf7fb8\") " pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.773643 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.796364 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:02 crc kubenswrapper[4822]: E1201 06:53:02.797019 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:03.297003116 +0000 UTC m=+138.617810802 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.797319 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxrwx\" (UniqueName: \"kubernetes.io/projected/47aa0319-23cc-454f-9715-4c1f61d8009d-kube-api-access-dxrwx\") pod \"catalog-operator-68c6474976-q4c6k\" (UID: \"47aa0319-23cc-454f-9715-4c1f61d8009d\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.813450 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-j6lgc"] Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.816822 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptqh7\" (UniqueName: \"kubernetes.io/projected/1645e90d-8ddd-4df9-82cd-e50edcac62ad-kube-api-access-ptqh7\") pod \"dns-operator-744455d44c-4j7rj\" (UID: \"1645e90d-8ddd-4df9-82cd-e50edcac62ad\") " pod="openshift-dns-operator/dns-operator-744455d44c-4j7rj" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.825863 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.832892 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.835140 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.848494 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.852597 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jnkl\" (UniqueName: \"kubernetes.io/projected/a2a429db-67df-45d5-81d0-8077eb96b9e4-kube-api-access-5jnkl\") pod \"machine-config-server-9lnf6\" (UID: \"a2a429db-67df-45d5-81d0-8077eb96b9e4\") " pod="openshift-machine-config-operator/machine-config-server-9lnf6" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.898134 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-9lnf6" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.899747 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:02 crc kubenswrapper[4822]: E1201 06:53:02.899946 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:03.399919451 +0000 UTC m=+138.720727137 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.900230 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:02 crc kubenswrapper[4822]: E1201 06:53:02.901506 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:03.401493449 +0000 UTC m=+138.722301135 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.911911 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pclc\" (UniqueName: \"kubernetes.io/projected/6577ee23-bf0c-4452-adfa-40e6b6b0678c-kube-api-access-7pclc\") pod \"ingress-operator-5b745b69d9-zrxm2\" (UID: \"6577ee23-bf0c-4452-adfa-40e6b6b0678c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.913117 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fv72\" (UniqueName: \"kubernetes.io/projected/fdaf1821-e93f-499d-b3d2-dca3ac09fa8e-kube-api-access-7fv72\") pod \"openshift-controller-manager-operator-756b6f6bc6-spbn2\" (UID: \"fdaf1821-e93f-499d-b3d2-dca3ac09fa8e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.930738 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqwjs\" (UniqueName: \"kubernetes.io/projected/28bcb0b1-e632-4b21-8771-e41772fe28f8-kube-api-access-vqwjs\") pod \"console-operator-58897d9998-hpr4x\" (UID: \"28bcb0b1-e632-4b21-8771-e41772fe28f8\") " pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.933888 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5vmh\" (UniqueName: \"kubernetes.io/projected/03c39fe9-7006-45af-8c8f-6f970b3a9656-kube-api-access-t5vmh\") pod \"dns-default-f87b4\" (UID: \"03c39fe9-7006-45af-8c8f-6f970b3a9656\") " pod="openshift-dns/dns-default-f87b4" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.934148 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" Dec 01 06:53:02 crc kubenswrapper[4822]: I1201 06:53:02.951826 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk59w\" (UniqueName: \"kubernetes.io/projected/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-kube-api-access-lk59w\") pod \"collect-profiles-29409525-szbr7\" (UID: \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.650999 4822 request.go:700] Waited for 1.201271623s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-operator-74547568cd-d4kcb Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.651926 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-f87b4" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.652879 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.653363 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-4j7rj" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.653890 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.655812 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdm4l\" (UniqueName: \"kubernetes.io/projected/1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b-kube-api-access-fdm4l\") pod \"machine-config-controller-84d6567774-5xqgh\" (UID: \"1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.682041 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.682903 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.683225 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.690274 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/282e7604-ab9e-4255-a9a2-bc32c090ecd5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8rkch\" (UID: \"282e7604-ab9e-4255-a9a2-bc32c090ecd5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.690276 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbztp\" (UniqueName: \"kubernetes.io/projected/fdf3b502-078c-46b2-8d35-40ae4a29cbd1-kube-api-access-fbztp\") pod \"ingress-canary-kcpds\" (UID: \"fdf3b502-078c-46b2-8d35-40ae4a29cbd1\") " pod="openshift-ingress-canary/ingress-canary-kcpds" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.690445 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.691310 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6577ee23-bf0c-4452-adfa-40e6b6b0678c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zrxm2\" (UID: \"6577ee23-bf0c-4452-adfa-40e6b6b0678c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.691748 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7bqw\" (UniqueName: \"kubernetes.io/projected/1d0849c6-b1d6-4173-8a67-e97369b1cf56-kube-api-access-g7bqw\") pod \"machine-approver-56656f9798-4dl8p\" (UID: 
\"1d0849c6-b1d6-4173-8a67-e97369b1cf56\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.692247 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.692627 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 01 06:53:03 crc kubenswrapper[4822]: E1201 06:53:03.696589 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:04.696494706 +0000 UTC m=+140.017302452 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.699161 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44t7r\" (UniqueName: \"kubernetes.io/projected/1ef340e9-1eee-41e6-802f-4ee1768acf70-kube-api-access-44t7r\") pod \"cluster-samples-operator-665b6dd947-4thh8\" (UID: \"1ef340e9-1eee-41e6-802f-4ee1768acf70\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.701930 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkxlt\" (UniqueName: \"kubernetes.io/projected/27ab4099-8ed2-4115-92f7-07109c42430f-kube-api-access-mkxlt\") pod \"migrator-59844c95c7-9hgl8\" (UID: \"27ab4099-8ed2-4115-92f7-07109c42430f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9hgl8" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.701943 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grlwl\" (UniqueName: \"kubernetes.io/projected/fee44223-9d68-4b77-807a-a806cd0e842f-kube-api-access-grlwl\") pod \"package-server-manager-789f6589d5-lw8s9\" (UID: \"fee44223-9d68-4b77-807a-a806cd0e842f\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.703672 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xt7nw\" (UniqueName: \"kubernetes.io/projected/837c103b-8351-4e7d-b00b-1b51640a1dd2-kube-api-access-xt7nw\") pod \"multus-admission-controller-857f4d67dd-s9g5t\" (UID: \"837c103b-8351-4e7d-b00b-1b51640a1dd2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s9g5t" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.704324 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wcs7\" (UniqueName: \"kubernetes.io/projected/9170d425-7fae-40d9-aab0-7d2afbaa56e4-kube-api-access-5wcs7\") pod \"downloads-7954f5f757-k9qc2\" (UID: \"9170d425-7fae-40d9-aab0-7d2afbaa56e4\") " pod="openshift-console/downloads-7954f5f757-k9qc2" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.704519 4822 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpz9h\" (UniqueName: \"kubernetes.io/projected/fd504091-d7b4-4355-b17f-85eedfbb4731-kube-api-access-gpz9h\") pod \"packageserver-d55dfcdfc-gwj6x\" (UID: \"fd504091-d7b4-4355-b17f-85eedfbb4731\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.704844 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mxtw\" (UniqueName: \"kubernetes.io/projected/f3ee1c28-9784-4602-949b-c877f1c71adf-kube-api-access-2mxtw\") pod \"service-ca-operator-777779d784-ddprm\" (UID: \"f3ee1c28-9784-4602-949b-c877f1c71adf\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.705259 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.706690 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f140c3a7-856d-4cda-87f0-cf7aff2c1c7d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5cxxd\" (UID: \"f140c3a7-856d-4cda-87f0-cf7aff2c1c7d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.706816 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbs8j\" (UniqueName: \"kubernetes.io/projected/b52e759c-d09d-42b3-b2c9-a9760a69ef5e-kube-api-access-zbs8j\") pod \"olm-operator-6b444d44fb-d64jc\" (UID: \"b52e759c-d09d-42b3-b2c9-a9760a69ef5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.707530 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwj4k\" (UniqueName: \"kubernetes.io/projected/9fafefcc-1420-4d16-a98e-56e31a3864ad-kube-api-access-cwj4k\") pod \"router-default-5444994796-nkhfk\" (UID: \"9fafefcc-1420-4d16-a98e-56e31a3864ad\") " pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.708141 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" event={"ID":"c0c87798-bf92-4257-95df-a5aa1c305994","Type":"ContainerStarted","Data":"a01b8c3db37bc11834fd1f83b4446f1d35b7fe2915e877df28a3cd128201530d"} Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.711233 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" event={"ID":"af89d20b-5709-4b8c-ade4-16f111487525","Type":"ContainerStarted","Data":"d8441efbf251acb971699bfc4510d510987062d87021e461cbb8ac6e2d5b8aa5"} Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.714398 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmld8\" (UniqueName: \"kubernetes.io/projected/823bb36f-f372-441d-a483-85603bbac215-kube-api-access-xmld8\") pod \"csi-hostpathplugin-6mnpz\" (UID: \"823bb36f-f372-441d-a483-85603bbac215\") " pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.715709 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtlj5\" (UniqueName: 
\"kubernetes.io/projected/839d69e8-399b-4c30-b64f-893327a389e7-kube-api-access-xtlj5\") pod \"console-f9d7485db-ptzkb\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") " pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.720103 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.730505 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc49q\" (UniqueName: \"kubernetes.io/projected/5485f8d6-493f-4b42-88a6-363043c13a90-kube-api-access-fc49q\") pod \"marketplace-operator-79b997595-5c4b8\" (UID: \"5485f8d6-493f-4b42-88a6-363043c13a90\") " pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.734132 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" event={"ID":"92434bb5-42fe-4d2e-8a9e-0517b46c6774","Type":"ContainerStarted","Data":"8f098023b638659e65bc018d410831729dcfcd073255b7bfade8fb180528b23d"} Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.736989 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" event={"ID":"0e6164df-7beb-4da5-8853-a0785999d30c","Type":"ContainerStarted","Data":"08e27b4abc1737c15fbe178af1addd7230194446dd07dbe9ea9b4b14c75ad1e5"} Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.737876 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" event={"ID":"fffc4838-3f04-4867-b948-b40f642203de","Type":"ContainerStarted","Data":"5753a5b7cb602e932561a7e7c5795c49d3018f9a179d71c6663b4ccb0b3898b2"} Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.740939 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.768730 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.775525 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-kcpds" Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.801478 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:03 crc kubenswrapper[4822]: E1201 06:53:03.801831 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:04.301812444 +0000 UTC m=+139.622620130 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.897389 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-ptzkb"
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.902186 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:03 crc kubenswrapper[4822]: E1201 06:53:03.902481 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:04.4024705 +0000 UTC m=+139.723278186 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.917137 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-k9qc2"
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.933364 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd"
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.939164 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p"
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.947639 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-s9g5t"
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.953177 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh"
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.966971 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2"
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.973407 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch"
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.980808 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-nkhfk"
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.986013 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8"
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.992711 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9hgl8"
Dec 01 06:53:03 crc kubenswrapper[4822]: I1201 06:53:03.998862 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm"
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.003713 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:04 crc kubenswrapper[4822]: E1201 06:53:04.003943 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:04.503926781 +0000 UTC m=+139.824734467 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.013336 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8"
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.020427 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hpr4x"]
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.061237 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6nbbk"]
Dec 01 06:53:04 crc kubenswrapper[4822]: W1201 06:53:04.080213 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2a429db_67df_45d5_81d0_8077eb96b9e4.slice/crio-e46d3e039016e4149ed14970a589f0570ab313ca0657543392b8ec294ed85bb6 WatchSource:0}: Error finding container e46d3e039016e4149ed14970a589f0570ab313ca0657543392b8ec294ed85bb6: Status 404 returned error can't find the container with id e46d3e039016e4149ed14970a589f0570ab313ca0657543392b8ec294ed85bb6
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.104643 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:04 crc kubenswrapper[4822]: E1201 06:53:04.104991 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:04.60497831 +0000 UTC m=+139.925785986 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.206534 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:04 crc kubenswrapper[4822]: E1201 06:53:04.206920 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:04.706901695 +0000 UTC m=+140.027709381 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.307776 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:04 crc kubenswrapper[4822]: E1201 06:53:04.308164 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:04.808148059 +0000 UTC m=+140.128955745 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.356217 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb"]
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.383799 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4j7rj"]
Dec 01 06:53:04 crc kubenswrapper[4822]: W1201 06:53:04.400358 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d0849c6_b1d6_4173_8a67_e97369b1cf56.slice/crio-10279c02a0e3d6ff082ca85d5700f982c747c64c903d811ec9c90b292ebf3d26 WatchSource:0}: Error finding container 10279c02a0e3d6ff082ca85d5700f982c747c64c903d811ec9c90b292ebf3d26: Status 404 returned error can't find the container with id 10279c02a0e3d6ff082ca85d5700f982c747c64c903d811ec9c90b292ebf3d26
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.408203 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:04 crc kubenswrapper[4822]: E1201 06:53:04.408482 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:04.908467815 +0000 UTC m=+140.229275501 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.508909 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:04 crc kubenswrapper[4822]: E1201 06:53:04.509234 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.009214714 +0000 UTC m=+140.330022400 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.610063 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
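
[Annotation] The alternating TearDown/MountDevice failures above share one root cause: after the kubelet restart, kubevirt.io.hostpath-provisioner has not yet re-registered with the kubelet, so both the orphaned pod's unmount (UID 8f668bae-612b-4b75-9490-919e737c6a3b) and the replacement image-registry pod's mount hit the same "not found in the list of registered CSI drivers" lookup, and each failed operation is re-queued with a 500ms durationBeforeRetry, which is why the pair repeats roughly twice a second. A minimal client-go sketch (not part of the log; assumes a reachable kubeconfig and uses the node name crc from the log) for checking whether the driver has registered on the node:

// csidrivercheck.go — hypothetical diagnostic, not the kubelet's own code.
package main

import (
	"context"
	"fmt"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumption: a kubeconfig at the default ~/.kube/config location.
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		log.Fatal(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)

	// The CSINode object mirrors the kubelet's per-node list of registered
	// CSI plugins — the list the TearDown/MountDevice errors above consult.
	csiNode, err := cs.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		log.Fatal(err)
	}
	for _, d := range csiNode.Spec.Drivers {
		fmt.Printf("registered on node: %s (node ID %s)\n", d.Name, d.NodeID)
	}
}

Until kubevirt.io.hostpath-provisioner shows up in that list (its plugin pod, hostpath-provisioner/csi-hostpathplugin-6mnpz, is only synced later in this log), the retry loop below keeps going.
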
Dec 01 06:53:04 crc kubenswrapper[4822]: E1201 06:53:04.610790 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.110773398 +0000 UTC m=+140.431581084 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.720960 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:04 crc kubenswrapper[4822]: E1201 06:53:04.721349 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.221332697 +0000 UTC m=+140.542140383 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.775051 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" event={"ID":"1d0849c6-b1d6-4173-8a67-e97369b1cf56","Type":"ContainerStarted","Data":"10279c02a0e3d6ff082ca85d5700f982c747c64c903d811ec9c90b292ebf3d26"}
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.777937 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-9lnf6" event={"ID":"a2a429db-67df-45d5-81d0-8077eb96b9e4","Type":"ContainerStarted","Data":"e46d3e039016e4149ed14970a589f0570ab313ca0657543392b8ec294ed85bb6"}
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.778940 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nkhfk" event={"ID":"9fafefcc-1420-4d16-a98e-56e31a3864ad","Type":"ContainerStarted","Data":"f215203f5561d10a26e2e31c72e24a15ab52bcf3c2744615b1cbf58407cbada8"}
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.780489 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4j7rj" event={"ID":"1645e90d-8ddd-4df9-82cd-e50edcac62ad","Type":"ContainerStarted","Data":"82dc8248c68cec7798b3f8b21b25a851013ef4aa763fdc9d1723505b11e9df40"}
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.792990 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" event={"ID":"0e6164df-7beb-4da5-8853-a0785999d30c","Type":"ContainerStarted","Data":"b2da24aed8affd2ef99895249bea05adaa4ea078a0598c6bb1a939fabd46c9fc"}
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.802589 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-hpr4x" event={"ID":"28bcb0b1-e632-4b21-8771-e41772fe28f8","Type":"ContainerStarted","Data":"a3a40b557dbe1f9fc4928195807a3316cab040904a84f6be0899106a6515d5ca"}
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.810290 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" event={"ID":"0126cc59-9259-43c1-91ea-cdc05047bbee","Type":"ContainerStarted","Data":"568042114d3bb800f3d119bbc8978c85b316fb2bb3fd3bd21e28f74303ab6066"}
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.815531 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" event={"ID":"0a22c90f-806d-43a3-b68a-321f5763b64c","Type":"ContainerStarted","Data":"12ef49897d6dfb7ded1b9e9c44671576b67068315c1bfa0ac8a43dec1c898493"}
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.817765 4822 generic.go:334] "Generic (PLEG): container finished" podID="af89d20b-5709-4b8c-ade4-16f111487525" containerID="448d857f9cfdb280214d6e0d1192f705d211ea420fe9dbac009069875d23869d" exitCode=0
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.817821 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" event={"ID":"af89d20b-5709-4b8c-ade4-16f111487525","Type":"ContainerDied","Data":"448d857f9cfdb280214d6e0d1192f705d211ea420fe9dbac009069875d23869d"}
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.820880 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn" event={"ID":"e411fb84-4e27-4611-80b5-9fd52e71441e","Type":"ContainerStarted","Data":"468df8b33e61376df04aada035efdda180dcc306654377d5c6dc290e99a19ea9"}
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.821421 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:04 crc kubenswrapper[4822]: E1201 06:53:04.821726 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.321699054 +0000 UTC m=+140.642506810 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.821929 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:04 crc kubenswrapper[4822]: E1201 06:53:04.822609 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.322592682 +0000 UTC m=+140.643400368 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.823140 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" event={"ID":"92434bb5-42fe-4d2e-8a9e-0517b46c6774","Type":"ContainerStarted","Data":"806c5ce2c3f4e214bb3d0965d0470ba3de8e1010599c3f0019c59d0946bcf653"}
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.928147 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:04 crc kubenswrapper[4822]: E1201 06:53:04.928485 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.428463497 +0000 UTC m=+140.749271183 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:04 crc kubenswrapper[4822]: I1201 06:53:04.928679 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:04 crc kubenswrapper[4822]: E1201 06:53:04.930291 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.430276473 +0000 UTC m=+140.751084159 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.016612 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-j6lgc" podStartSLOduration=121.016545769 podStartE2EDuration="2m1.016545769s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:05.009934017 +0000 UTC m=+140.330741703" watchObservedRunningTime="2025-12-01 06:53:05.016545769 +0000 UTC m=+140.337353445"
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.030601 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.030908 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.530883618 +0000 UTC m=+140.851691304 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.030995 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.031350 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.531343712 +0000 UTC m=+140.852151398 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.103529 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qhchn" podStartSLOduration=120.103510367 podStartE2EDuration="2m0.103510367s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:05.102987881 +0000 UTC m=+140.423795567" watchObservedRunningTime="2025-12-01 06:53:05.103510367 +0000 UTC m=+140.424318053"
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.104797 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9lk5l" podStartSLOduration=120.104792046 podStartE2EDuration="2m0.104792046s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:05.061002388 +0000 UTC m=+140.381810084" watchObservedRunningTime="2025-12-01 06:53:05.104792046 +0000 UTC m=+140.425599732"
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.131753 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.131976 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.631952846 +0000 UTC m=+140.952760542 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.132130 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.133927 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.633912256 +0000 UTC m=+140.954719942 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.233416 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.233826 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.73381088 +0000 UTC m=+141.054618556 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.335423 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.335856 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.835837228 +0000 UTC m=+141.156644964 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.426373 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j"]
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.436722 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.437057 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:05.937042711 +0000 UTC m=+141.257850397 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.440204 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dlh8x"]
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.443076 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7"]
Dec 01 06:53:05 crc kubenswrapper[4822]: W1201 06:53:05.470899 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c1f05f0_aa0d_48fa_a6de_04c19783f5c2.slice/crio-f4036dfbc893495378008daf7e30f7b32311cb13348cd809b7cea75f47f043f7 WatchSource:0}: Error finding container f4036dfbc893495378008daf7e30f7b32311cb13348cd809b7cea75f47f043f7: Status 404 returned error can't find the container with id f4036dfbc893495378008daf7e30f7b32311cb13348cd809b7cea75f47f043f7
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.482810 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2"]
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.539057 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.539390 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.039378698 +0000 UTC m=+141.360186384 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.542510 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x"]
Dec 01 06:53:05 crc kubenswrapper[4822]: W1201 06:53:05.559265 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfdaf1821_e93f_499d_b3d2_dca3ac09fa8e.slice/crio-b86273929b7548e94d37af8ff31003cb7ae413d6146ca201ecbac1b966dbe4f4 WatchSource:0}: Error finding container b86273929b7548e94d37af8ff31003cb7ae413d6146ca201ecbac1b966dbe4f4: Status 404 returned error can't find the container with id b86273929b7548e94d37af8ff31003cb7ae413d6146ca201ecbac1b966dbe4f4
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.576544 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9"]
Dec 01 06:53:05 crc kubenswrapper[4822]: W1201 06:53:05.581207 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd504091_d7b4_4355_b17f_85eedfbb4731.slice/crio-155defb9745aadc8456227c615ebac52a539cd02c26b8e51fdd5f3778ba05f28 WatchSource:0}: Error finding container 155defb9745aadc8456227c615ebac52a539cd02c26b8e51fdd5f3778ba05f28: Status 404 returned error can't find the container with id 155defb9745aadc8456227c615ebac52a539cd02c26b8e51fdd5f3778ba05f28
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.587376 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47"]
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.601245 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k"]
Dec 01 06:53:05 crc kubenswrapper[4822]: W1201 06:53:05.601311 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee914768_03f2_4378_9db1_02fdc1b8d048.slice/crio-a55ab7f57481b08e36dbe45e0d1d57d7f8a17dee417dc1f6f118f0cb593ffdcd WatchSource:0}: Error finding container a55ab7f57481b08e36dbe45e0d1d57d7f8a17dee417dc1f6f118f0cb593ffdcd: Status 404 returned error can't find the container with id a55ab7f57481b08e36dbe45e0d1d57d7f8a17dee417dc1f6f118f0cb593ffdcd
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.623533 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc"]
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.652507 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.652586 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.152561348 +0000 UTC m=+141.473369034 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.654827 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.655507 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.155486857 +0000 UTC m=+141.476294543 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.754494 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh"]
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.755981 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
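
[Annotation] Driver registration is node-local: a CSI plugin exposes a gRPC registration socket under the kubelet's plugins_registry directory, and the kubelet's plugin watcher picks it up from there. A small sketch for inspecting that directory (assumptions: run directly on the node, default kubelet root directory; the path is a standard kubelet convention and does not appear in this log):

// pluginsockets.go — hypothetical node-side check, not from the log.
package main

import (
	"fmt"
	"log"
	"os"
)

func main() {
	// Default kubelet plugin-registration directory; a healthy
	// kubevirt.io.hostpath-provisioner registration would place a
	// *.sock endpoint here for the kubelet to discover.
	const dir = "/var/lib/kubelet/plugins_registry"
	entries, err := os.ReadDir(dir)
	if err != nil {
		log.Fatal(err) // e.g. not on the node, or a non-default kubelet root dir
	}
	for _, e := range entries {
		fmt.Println(dir + "/" + e.Name())
	}
}

Until such a socket exists and registration completes, every retry below fails the same way.
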
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.756410 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.256394281 +0000 UTC m=+141.577201967 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.836741 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-9lnf6" event={"ID":"a2a429db-67df-45d5-81d0-8077eb96b9e4","Type":"ContainerStarted","Data":"a4c45e213c315d394f738b84ce7569b237aa4262b46889028bb8e30439af3caa"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.846314 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-hpr4x" event={"ID":"28bcb0b1-e632-4b21-8771-e41772fe28f8","Type":"ContainerStarted","Data":"d5e9233bdb35adc9448cf0811ede09cdbabe49df0f6c2556ab02ecc22a82862a"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.846649 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-hpr4x"
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.849386 4822 patch_prober.go:28] interesting pod/console-operator-58897d9998-hpr4x container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/readyz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body=
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.849424 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-hpr4x" podUID="28bcb0b1-e632-4b21-8771-e41772fe28f8" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/readyz\": dial tcp 10.217.0.33:8443: connect: connection refused"
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.854333 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" event={"ID":"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2","Type":"ContainerStarted","Data":"f4036dfbc893495378008daf7e30f7b32311cb13348cd809b7cea75f47f043f7"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.857090 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.858176 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.358164141 +0000 UTC m=+141.678971827 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.866140 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" event={"ID":"fee44223-9d68-4b77-807a-a806cd0e842f","Type":"ContainerStarted","Data":"10391f8dba970a29e4982db0a0275f3b109859b2f6daa965e80a6096cac44bb2"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.869103 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47" event={"ID":"ee914768-03f2-4378-9db1-02fdc1b8d048","Type":"ContainerStarted","Data":"a55ab7f57481b08e36dbe45e0d1d57d7f8a17dee417dc1f6f118f0cb593ffdcd"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.883182 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-9lnf6" podStartSLOduration=5.883164785 podStartE2EDuration="5.883164785s" podCreationTimestamp="2025-12-01 06:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:05.865510256 +0000 UTC m=+141.186317962" watchObservedRunningTime="2025-12-01 06:53:05.883164785 +0000 UTC m=+141.203972471"
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.884847 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" event={"ID":"af89d20b-5709-4b8c-ade4-16f111487525","Type":"ContainerStarted","Data":"1a1437210cd49fde13c579bee5bcf19f8b5666c86deb55bbd6df21b0107f19ef"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.888645 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-kcpds"]
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.894704 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch"]
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.905645 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" podStartSLOduration=121.905628612 podStartE2EDuration="2m1.905628612s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:05.897230215 +0000 UTC m=+141.218037901" watchObservedRunningTime="2025-12-01 06:53:05.905628612 +0000 UTC m=+141.226436298"
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.905882 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-k9qc2"]
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.918702 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" event={"ID":"1d0849c6-b1d6-4173-8a67-e97369b1cf56","Type":"ContainerStarted","Data":"6194b3f99dd08a1690a6c7ee789f42d0ab9d71506d26722b44c7282281c7a528"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.922762 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-ddprm"]
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.923440 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nkhfk" event={"ID":"9fafefcc-1420-4d16-a98e-56e31a3864ad","Type":"ContainerStarted","Data":"89df6c5e868fed773c12f5da6bfd22e7800cd1918876f4c4884b8b616b7fcac8"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.937953 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m"]
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.951596 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-hpr4x" podStartSLOduration=121.951571396 podStartE2EDuration="2m1.951571396s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:05.940036994 +0000 UTC m=+141.260844710" watchObservedRunningTime="2025-12-01 06:53:05.951571396 +0000 UTC m=+141.272379082"
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.953151 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6"]
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.953343 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" event={"ID":"1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b","Type":"ContainerStarted","Data":"1e7932267dde75408cefee0f6a34538b6e5c5a8b26d183164c3858a68cbe1113"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.964898 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.964979 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.464959595 +0000 UTC m=+141.785767281 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.965293 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:05 crc kubenswrapper[4822]: E1201 06:53:05.967613 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.467601606 +0000 UTC m=+141.788409292 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.981834 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-nkhfk"
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.983611 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-nkhfk" podStartSLOduration=120.983598595 podStartE2EDuration="2m0.983598595s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:05.979323164 +0000 UTC m=+141.300130850" watchObservedRunningTime="2025-12-01 06:53:05.983598595 +0000 UTC m=+141.304406281"
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.985652 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 06:53:05 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld
Dec 01 06:53:05 crc kubenswrapper[4822]: [+]process-running ok
Dec 01 06:53:05 crc kubenswrapper[4822]: healthz check failed
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.985693 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.986077 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" event={"ID":"0126cc59-9259-43c1-91ea-cdc05047bbee","Type":"ContainerStarted","Data":"5a4aaf69e45cb7124b52f2fd9c1c541dccbfbdc33aeab49424019355fa5c3648"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.986115 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" event={"ID":"0126cc59-9259-43c1-91ea-cdc05047bbee","Type":"ContainerStarted","Data":"d15aa29c0879b7c1f2c3efeeaeeb14b9b4808510485730ca2dd6ce1db0f215fc"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.989047 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4j7rj" event={"ID":"1645e90d-8ddd-4df9-82cd-e50edcac62ad","Type":"ContainerStarted","Data":"6453684b697c671b2f27e671f90972e4199d06c4548e06346d4724e01766c66d"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.990867 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" event={"ID":"47aa0319-23cc-454f-9715-4c1f61d8009d","Type":"ContainerStarted","Data":"d525d489d4b33f5f784eb026e36d2ea7b5eab2094a3c28d8d694745fb7a0e10a"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.992138 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" event={"ID":"fdaf1821-e93f-499d-b3d2-dca3ac09fa8e","Type":"ContainerStarted","Data":"b86273929b7548e94d37af8ff31003cb7ae413d6146ca201ecbac1b966dbe4f4"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.994102 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" event={"ID":"c0c87798-bf92-4257-95df-a5aa1c305994","Type":"ContainerStarted","Data":"0169bae94b2b12d14f1acf3edc3f4b499b40f53dea483dec657351819e85cb90"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.998341 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" event={"ID":"218e0dbf-c5f6-405f-a22f-31b6f0f25d60","Type":"ContainerStarted","Data":"7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.998400 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" event={"ID":"218e0dbf-c5f6-405f-a22f-31b6f0f25d60","Type":"ContainerStarted","Data":"e99cf940b27126385d82ca118adcda6d6d2d8c00b3e91f049142f9358b3bd0bb"}
Dec 01 06:53:05 crc kubenswrapper[4822]: I1201 06:53:05.999408 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j"
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.001862 4822 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-9sr2j container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
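
[Annotation] The probe records above are the kubelet's container probes: patch_prober.go logs the failing endpoint plus the start of the response body (the router's healthz breakdown with its [-]/[+] check list), and prober.go:107 records the verdict. A hedged sketch that reproduces the same HTTP check by hand (not the kubelet's actual prober code; the pod IPs 10.217.0.33 and 10.217.0.6 from the log are reachable only from inside the cluster network):

// probecheck.go — hypothetical manual re-run of a readiness probe.
package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	client := &http.Client{
		// Pods commonly serve self-signed certs on these ports, so skip
		// verification here; the real kubelet probe configures its own transport.
		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
	}
	resp, err := client.Get("https://10.217.0.6:8443/healthz")
	if err != nil {
		log.Fatal(err) // e.g. "connect: connection refused", exactly as logged
	}
	defer resp.Body.Close()
	// patch_prober only reports the start of the body, so cap the read.
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 256))
	fmt.Printf("status=%d start-of-body=%q\n", resp.StatusCode, body)
}

A 500 with a body like "[-]backend-http failed: reason withheld" matches the router startup probe above; a refused connection matches the console-operator and route-controller-manager readiness failures, which clear once those containers begin listening.
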
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.001927 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" podUID="218e0dbf-c5f6-405f-a22f-31b6f0f25d60" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.006425 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" podStartSLOduration=121.006408392 podStartE2EDuration="2m1.006408392s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:06.004222345 +0000 UTC m=+141.325030051" watchObservedRunningTime="2025-12-01 06:53:06.006408392 +0000 UTC m=+141.327216078"
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.009337 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" event={"ID":"0a22c90f-806d-43a3-b68a-321f5763b64c","Type":"ContainerStarted","Data":"50331ed91694a4c52ac0cfebcb6daf185bb9943d7ea896cdd389ef040c7e1388"}
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.010866 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" event={"ID":"0416c999-c003-42ff-8c6e-30357b23975d","Type":"ContainerStarted","Data":"e63b5177ec47c93737a50a693581bf75ac2b47002b040b734d3a3225232a5ca7"}
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.011754 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" event={"ID":"fd504091-d7b4-4355-b17f-85eedfbb4731","Type":"ContainerStarted","Data":"155defb9745aadc8456227c615ebac52a539cd02c26b8e51fdd5f3778ba05f28"}
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.012924 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" event={"ID":"7ea02c2f-7f45-4a40-bedf-743a3adf7fb8","Type":"ContainerStarted","Data":"895d83c118b3998a68e326cd5dfa7e114d6bd9e48ba811d5765c94c5ebc41672"}
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.022778 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-j7hbz" podStartSLOduration=121.022758962 podStartE2EDuration="2m1.022758962s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:06.017921344 +0000 UTC m=+141.338729030" watchObservedRunningTime="2025-12-01 06:53:06.022758962 +0000 UTC m=+141.343566648"
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.042310 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" event={"ID":"fffc4838-3f04-4867-b948-b40f642203de","Type":"ContainerStarted","Data":"c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca"}
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.044541 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-jq95c"
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.051620 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-6nbbk" podStartSLOduration=121.051602543 podStartE2EDuration="2m1.051602543s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:06.042186745 +0000 UTC m=+141.362994431" watchObservedRunningTime="2025-12-01 06:53:06.051602543 +0000 UTC m=+141.372410229"
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.066315 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.067875 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd"]
Dec 01 06:53:06 crc kubenswrapper[4822]: E1201 06:53:06.068272 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.568256572 +0000 UTC m=+141.889064258 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.120079 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" podStartSLOduration=121.120059485 podStartE2EDuration="2m1.120059485s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:06.066043755 +0000 UTC m=+141.386851441" watchObservedRunningTime="2025-12-01 06:53:06.120059485 +0000 UTC m=+141.440867171"
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.145643 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-f87b4"]
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.154885 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9hgl8"]
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.156710 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-nmx5d"]
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.165405 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t4mcw"]
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.165471 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5c4b8"]
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.168244 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-ptzkb"]
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.169169 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:06 crc kubenswrapper[4822]: E1201 06:53:06.169467 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.669455515 +0000 UTC m=+141.990263201 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.169621 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-6mnpz"]
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.174676 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2"]
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.177199 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc"]
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.181260 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" podStartSLOduration=122.181241235 podStartE2EDuration="2m2.181241235s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:06.104657935 +0000 UTC m=+141.425465621" watchObservedRunningTime="2025-12-01 06:53:06.181241235 +0000 UTC m=+141.502048921"
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.193338 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8"]
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.194854 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-s9g5t"]
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.270188 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:06 crc kubenswrapper[4822]: E1201 06:53:06.270360 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.770305197 +0000 UTC m=+142.091112883 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.270932 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:06 crc kubenswrapper[4822]: E1201 06:53:06.271457 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.771447662 +0000 UTC m=+142.092255348 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:06 crc kubenswrapper[4822]: W1201 06:53:06.301740 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb52e759c_d09d_42b3_b2c9_a9760a69ef5e.slice/crio-a9c13f39081b5ceab5fdabaf58c3f5cbcbad43184c9b424b8a46ce1c3b07651a WatchSource:0}: Error finding container a9c13f39081b5ceab5fdabaf58c3f5cbcbad43184c9b424b8a46ce1c3b07651a: Status 404 returned error can't find the container with id a9c13f39081b5ceab5fdabaf58c3f5cbcbad43184c9b424b8a46ce1c3b07651a Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.373834 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:06 crc kubenswrapper[4822]: E1201 06:53:06.374512 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.874486871 +0000 UTC m=+142.195294557 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.475506 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:06 crc kubenswrapper[4822]: E1201 06:53:06.489350 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:06.989313561 +0000 UTC m=+142.310121247 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.578143 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:06 crc kubenswrapper[4822]: E1201 06:53:06.578316 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:07.07829126 +0000 UTC m=+142.399098946 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.578386 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:06 crc kubenswrapper[4822]: E1201 06:53:06.578716 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:07.078705593 +0000 UTC m=+142.399513279 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.679100 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:06 crc kubenswrapper[4822]: E1201 06:53:06.691474 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:07.191409417 +0000 UTC m=+142.512217103 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.750454 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.784205 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:06 crc kubenswrapper[4822]: E1201 06:53:06.784752 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:07.284726789 +0000 UTC m=+142.605534475 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.885241 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:06 crc kubenswrapper[4822]: E1201 06:53:06.886301 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:07.386258142 +0000 UTC m=+142.707065838 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.988916 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 06:53:06 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld
Dec 01 06:53:06 crc kubenswrapper[4822]: [+]process-running ok
Dec 01 06:53:06 crc kubenswrapper[4822]: healthz check failed
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.988961 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 06:53:06 crc kubenswrapper[4822]: I1201 06:53:06.989884 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:06 crc kubenswrapper[4822]: E1201 06:53:06.990310 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:07.490291352 +0000 UTC m=+142.811099038 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.072562 4822 generic.go:334] "Generic (PLEG): container finished" podID="0416c999-c003-42ff-8c6e-30357b23975d" containerID="f5f017375ec950684e54512ddfca6e0d5d6a927d745e64cc8a622ddd10312e4f" exitCode=0 Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.072844 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" event={"ID":"0416c999-c003-42ff-8c6e-30357b23975d","Type":"ContainerDied","Data":"f5f017375ec950684e54512ddfca6e0d5d6a927d745e64cc8a622ddd10312e4f"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.075050 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" event={"ID":"f3ee1c28-9784-4602-949b-c877f1c71adf","Type":"ContainerStarted","Data":"6ad67295f690cb84dc67e5fa6a34f719318bd1a6eaf32679ea980f974769dc54"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.093177 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-k9qc2" event={"ID":"9170d425-7fae-40d9-aab0-7d2afbaa56e4","Type":"ContainerStarted","Data":"d4a0748ec5a750a79fee70dd33449e4f28628580d849393fb81df6ba3d3d91bc"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.093246 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-k9qc2" event={"ID":"9170d425-7fae-40d9-aab0-7d2afbaa56e4","Type":"ContainerStarted","Data":"9e474ee03623cd8b44fd5c9870d57cab3764b7bbd1e3034d6dde739a4dcb643b"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.095012 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-k9qc2" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.095345 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:07 crc kubenswrapper[4822]: E1201 06:53:07.096590 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:07.596540999 +0000 UTC m=+142.917348685 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.098861 4822 patch_prober.go:28] interesting pod/downloads-7954f5f757-k9qc2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.098936 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-k9qc2" podUID="9170d425-7fae-40d9-aab0-7d2afbaa56e4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.112375 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" event={"ID":"1d0849c6-b1d6-4173-8a67-e97369b1cf56","Type":"ContainerStarted","Data":"dd0bc02ccaaddc1caaec444165164f648a78a3cb60837152bae304cf648e41dd"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.139945 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-k9qc2" podStartSLOduration=122.139913554 podStartE2EDuration="2m2.139913554s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.138983306 +0000 UTC m=+142.459791002" watchObservedRunningTime="2025-12-01 06:53:07.139913554 +0000 UTC m=+142.460721230" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.185076 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" event={"ID":"fee44223-9d68-4b77-807a-a806cd0e842f","Type":"ContainerStarted","Data":"ef4d92e58fa67cd15a7ef9f72b57201349c29113f763b2cf3b40a33ba3e15130"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.195317 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4dl8p" podStartSLOduration=123.195280827 podStartE2EDuration="2m3.195280827s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.164110144 +0000 UTC m=+142.484917830" watchObservedRunningTime="2025-12-01 06:53:07.195280827 +0000 UTC m=+142.516088513" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.197443 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:07 crc kubenswrapper[4822]: E1201 06:53:07.200285 4822 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:07.700267419 +0000 UTC m=+143.021075105 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.214163 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-kcpds" event={"ID":"fdf3b502-078c-46b2-8d35-40ae4a29cbd1","Type":"ContainerStarted","Data":"c17ff423b6e6a46593a5afa8762d4faa59d60ab426ab67a4ebc0c105dcbc38ae"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.214224 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-kcpds" event={"ID":"fdf3b502-078c-46b2-8d35-40ae4a29cbd1","Type":"ContainerStarted","Data":"f68ed0eaa23e69bab56042067329b04a5af670f7f8baafa3845b65dea236905a"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.260075 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-kcpds" podStartSLOduration=7.260058836 podStartE2EDuration="7.260058836s" podCreationTimestamp="2025-12-01 06:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.25852815 +0000 UTC m=+142.579335836" watchObservedRunningTime="2025-12-01 06:53:07.260058836 +0000 UTC m=+142.580866522" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.263895 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8" event={"ID":"1ef340e9-1eee-41e6-802f-4ee1768acf70","Type":"ContainerStarted","Data":"31d7031e6331e0001306d20a737c331c81f587170373943468cdb584066982a3"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.304758 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:07 crc kubenswrapper[4822]: E1201 06:53:07.307173 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:07.807148765 +0000 UTC m=+143.127956451 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.317057 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-ptzkb" event={"ID":"839d69e8-399b-4c30-b64f-893327a389e7","Type":"ContainerStarted","Data":"9ec9e48c7b095e820c45dff56d8ca643b8fa7d596bf79ba6f35a98f0e04bfcf0"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.323792 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" event={"ID":"47aa0319-23cc-454f-9715-4c1f61d8009d","Type":"ContainerStarted","Data":"adf27b49aa669ba75bca9eca702a18afcf757e2c029d401b2ebcdb98d3be2822"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.324163 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.350489 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" podStartSLOduration=122.35047182 podStartE2EDuration="2m2.35047182s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.349524191 +0000 UTC m=+142.670331877" watchObservedRunningTime="2025-12-01 06:53:07.35047182 +0000 UTC m=+142.671279506" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.361725 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-q4c6k" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.367912 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4j7rj" event={"ID":"1645e90d-8ddd-4df9-82cd-e50edcac62ad","Type":"ContainerStarted","Data":"adfce81044a6d9a65c9c2303e4f34873c85ad1ec4da6840745324b3c4cde52ae"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.389502 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" event={"ID":"bab12393-6d6a-459b-8054-0934d99ffd0f","Type":"ContainerStarted","Data":"02aab337acae2d967f5ab039e10365cce5fdb2720855b4d787e16ae86097319c"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.413813 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:07 crc kubenswrapper[4822]: E1201 06:53:07.415781 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-01 06:53:07.915752054 +0000 UTC m=+143.236559740 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.418454 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" event={"ID":"fd504091-d7b4-4355-b17f-85eedfbb4731","Type":"ContainerStarted","Data":"2654121fcd0f5ec6dc060498f700440474e263b573fa1eff90470107154012f5"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.419067 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.485268 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-4j7rj" podStartSLOduration=122.485211237 podStartE2EDuration="2m2.485211237s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.470116655 +0000 UTC m=+142.790924331" watchObservedRunningTime="2025-12-01 06:53:07.485211237 +0000 UTC m=+142.806018923" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.489736 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" event={"ID":"4570adbe-e705-4cbe-aecb-b27a3b930048","Type":"ContainerStarted","Data":"f55a01453fda6e34ee70ea37d7806295ec2deac5ca7697dff7f91cc5c9363358"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.497674 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.497984 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.515278 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:07 crc kubenswrapper[4822]: E1201 06:53:07.515707 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:08.015671848 +0000 UTC m=+143.336479534 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.515860 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:07 crc kubenswrapper[4822]: E1201 06:53:07.517311 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:08.017281467 +0000 UTC m=+143.338089343 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.527004 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-s9g5t" event={"ID":"837c103b-8351-4e7d-b00b-1b51640a1dd2","Type":"ContainerStarted","Data":"ac3164b6e25591f0ee6164ac187afbfa890a9cf7bea6cbbf5e1ae7b56c0065eb"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.539060 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.568091 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47" event={"ID":"ee914768-03f2-4378-9db1-02fdc1b8d048","Type":"ContainerStarted","Data":"2f39ae2b96658679301c7258a2d7df63a32b298a66521cc5a8e32e6b99ea3466"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.594523 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" event={"ID":"fdaf1821-e93f-499d-b3d2-dca3ac09fa8e","Type":"ContainerStarted","Data":"d25511876bd17bd2a18cabc6a013c9fd0984e08e1890d906830cc344cf7a517a"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.600173 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" podStartSLOduration=122.600145689 podStartE2EDuration="2m2.600145689s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.514469851 +0000 UTC m=+142.835277537" watchObservedRunningTime="2025-12-01 06:53:07.600145689 +0000 UTC 
m=+142.920953375" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.616921 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:07 crc kubenswrapper[4822]: E1201 06:53:07.617299 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:08.117263432 +0000 UTC m=+143.438071118 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.641463 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" event={"ID":"0a22c90f-806d-43a3-b68a-321f5763b64c","Type":"ContainerStarted","Data":"7a844cfe844190979260f56a71c4f029f69e7451aad0eeaddd68b53b70ec4dec"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.641647 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-c2l47" podStartSLOduration=123.641626717 podStartE2EDuration="2m3.641626717s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.640168412 +0000 UTC m=+142.960976108" watchObservedRunningTime="2025-12-01 06:53:07.641626717 +0000 UTC m=+142.962434423" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.676053 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" event={"ID":"823bb36f-f372-441d-a483-85603bbac215","Type":"ContainerStarted","Data":"ce878615e1ded6db9bf8883d0819bc6bb3e9b1ec954bf9ec9b8d4d48bd5c5b38"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.684713 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" event={"ID":"282e7604-ab9e-4255-a9a2-bc32c090ecd5","Type":"ContainerStarted","Data":"35ea69016df8512af2616ffc1cd890e9fd630f43f7f0c5d952de4067238e87ee"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.684774 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" event={"ID":"282e7604-ab9e-4255-a9a2-bc32c090ecd5","Type":"ContainerStarted","Data":"ad7b5d0660a68480f1bfb30c29d38cbafe54d46301e941fb8cf153a146c5d635"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.687286 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" 
event={"ID":"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2","Type":"ContainerStarted","Data":"0919d5049df538c888b405979ba43e6793a4eeaf631c4b79997d1d08d77387bb"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.693848 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-spbn2" podStartSLOduration=122.693812192 podStartE2EDuration="2m2.693812192s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.693049629 +0000 UTC m=+143.013857315" watchObservedRunningTime="2025-12-01 06:53:07.693812192 +0000 UTC m=+143.014619878" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.711861 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" event={"ID":"b52e759c-d09d-42b3-b2c9-a9760a69ef5e","Type":"ContainerStarted","Data":"a9c13f39081b5ceab5fdabaf58c3f5cbcbad43184c9b424b8a46ce1c3b07651a"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.712887 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.718165 4822 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-d64jc container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.718208 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" podUID="b52e759c-d09d-42b3-b2c9-a9760a69ef5e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.719001 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:07 crc kubenswrapper[4822]: E1201 06:53:07.719336 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:08.219326622 +0000 UTC m=+143.540134308 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.739795 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-d4kcb" podStartSLOduration=122.739776477 podStartE2EDuration="2m2.739776477s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.738009623 +0000 UTC m=+143.058817309" watchObservedRunningTime="2025-12-01 06:53:07.739776477 +0000 UTC m=+143.060584163" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.741754 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" event={"ID":"7ea02c2f-7f45-4a40-bedf-743a3adf7fb8","Type":"ContainerStarted","Data":"2462dda2062f277cd3b54426ba3a2ab1c1a37642ed190532172c6076109bf7d7"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.761154 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" event={"ID":"6577ee23-bf0c-4452-adfa-40e6b6b0678c","Type":"ContainerStarted","Data":"faf0a70a77eba62f84c68370e0675d95166308ff15ebd524803a75bf90b8ae0a"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.762542 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6" event={"ID":"ca838455-398e-4c35-844e-201e3fe65b5d","Type":"ContainerStarted","Data":"fbbcd34ffd2404d77abf7a1edf9ce8a45077c6416f1eada0cbb27f31c1b4caef"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.762586 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6" event={"ID":"ca838455-398e-4c35-844e-201e3fe65b5d","Type":"ContainerStarted","Data":"062550fed448a443a3f9f35a11b1d38a5a71af376355d48ad9216cafb4df0b19"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.777101 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" event={"ID":"1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b","Type":"ContainerStarted","Data":"60b46d79d4c5b032cec9d4adb89e75b56fec02e2845ec2e65a8f198348b79c15"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.781087 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9hgl8" event={"ID":"27ab4099-8ed2-4115-92f7-07109c42430f","Type":"ContainerStarted","Data":"0d6932053bff2daab491d89ac22dcca2ad06f0a81a859e5ed895ba2cde4c9e92"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.781157 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9hgl8" event={"ID":"27ab4099-8ed2-4115-92f7-07109c42430f","Type":"ContainerStarted","Data":"4b145f21e0242656f2a4e7730d477ceae5df7ba437b33661b140ff19cbeab6db"} Dec 01 06:53:07 crc 
kubenswrapper[4822]: I1201 06:53:07.782518 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" event={"ID":"f140c3a7-856d-4cda-87f0-cf7aff2c1c7d","Type":"ContainerStarted","Data":"f91fde5954a80d3fb73f1a69d3f3035ef43c4d02473f5f93147db5965cc21c61"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.800366 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-f87b4" event={"ID":"03c39fe9-7006-45af-8c8f-6f970b3a9656","Type":"ContainerStarted","Data":"fd138b87c83922f998b793d10fdba36de6802b8d794f6025cf3311d5828328e0"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.802604 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8rkch" podStartSLOduration=122.802584696 podStartE2EDuration="2m2.802584696s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.787745953 +0000 UTC m=+143.108553649" watchObservedRunningTime="2025-12-01 06:53:07.802584696 +0000 UTC m=+143.123392392" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.804483 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" event={"ID":"1fd76610-c79c-4c82-8b29-9038aa0ac22c","Type":"ContainerStarted","Data":"eb6424f123338b13e9742482bda7674f209573e647ccdc8d573c13f0484d348b"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.804525 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" event={"ID":"1fd76610-c79c-4c82-8b29-9038aa0ac22c","Type":"ContainerStarted","Data":"8eef9975e4bfe8e369064f73d68bc216f1685cbf6c8194ff8a6f1d0dc84bad37"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.815262 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" event={"ID":"5485f8d6-493f-4b42-88a6-363043c13a90","Type":"ContainerStarted","Data":"8906fa032c8ac7c9ec5103b758f19b779eec1c86e855adbe539542e6867c9849"} Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.824747 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:07 crc kubenswrapper[4822]: E1201 06:53:07.827137 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:08.327085785 +0000 UTC m=+143.647893471 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.834137 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.840084 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-dlh8x" podStartSLOduration=122.840060732 podStartE2EDuration="2m2.840060732s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.830822619 +0000 UTC m=+143.151630305" watchObservedRunningTime="2025-12-01 06:53:07.840060732 +0000 UTC m=+143.160868418" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.842692 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ttrnv" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.855959 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-hpr4x" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.861514 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" podStartSLOduration=122.861487627 podStartE2EDuration="2m2.861487627s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.860875818 +0000 UTC m=+143.181683504" watchObservedRunningTime="2025-12-01 06:53:07.861487627 +0000 UTC m=+143.182295313" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.904701 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fwgk6" podStartSLOduration=122.904682177 podStartE2EDuration="2m2.904682177s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.904045527 +0000 UTC m=+143.224853223" watchObservedRunningTime="2025-12-01 06:53:07.904682177 +0000 UTC m=+143.225489863" Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.937697 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:07 crc kubenswrapper[4822]: E1201 06:53:07.938186 4822 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:08.43816887 +0000 UTC m=+143.758976556 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:07 crc kubenswrapper[4822]: I1201 06:53:07.958530 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" podStartSLOduration=122.958516132 podStartE2EDuration="2m2.958516132s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:07.957995276 +0000 UTC m=+143.278802962" watchObservedRunningTime="2025-12-01 06:53:07.958516132 +0000 UTC m=+143.279323818" Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.031264 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:08 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:08 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:08 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.034923 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.042328 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:08 crc kubenswrapper[4822]: E1201 06:53:08.042624 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:08.542590111 +0000 UTC m=+143.863397797 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.042957 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:08 crc kubenswrapper[4822]: E1201 06:53:08.043436 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:08.543411266 +0000 UTC m=+143.864218952 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.142457 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gwj6x" Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.143992 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:08 crc kubenswrapper[4822]: E1201 06:53:08.144300 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:08.6442863 +0000 UTC m=+143.965093986 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.245034 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:08 crc kubenswrapper[4822]: E1201 06:53:08.245781 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:08.745763051 +0000 UTC m=+144.066570737 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.423122 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:08 crc kubenswrapper[4822]: E1201 06:53:08.423472 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:08.923457462 +0000 UTC m=+144.244265148 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.601923 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:08 crc kubenswrapper[4822]: E1201 06:53:08.602497 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:09.102476853 +0000 UTC m=+144.423284739 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.709542 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:08 crc kubenswrapper[4822]: E1201 06:53:08.710020 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:09.209998839 +0000 UTC m=+144.530806525 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.812367 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:08 crc kubenswrapper[4822]: E1201 06:53:08.812740 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:09.312725999 +0000 UTC m=+144.633533685 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.875907 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" podStartSLOduration=123.875874499 podStartE2EDuration="2m3.875874499s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:08.792573153 +0000 UTC m=+144.113380839" watchObservedRunningTime="2025-12-01 06:53:08.875874499 +0000 UTC m=+144.196682185" Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.876790 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dpb7m" podStartSLOduration=123.876784506 podStartE2EDuration="2m3.876784506s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:08.676390862 +0000 UTC m=+143.997198548" watchObservedRunningTime="2025-12-01 06:53:08.876784506 +0000 UTC m=+144.197592192" Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.918620 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:08 crc kubenswrapper[4822]: E1201 06:53:08.918935 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:09.418921284 +0000 UTC m=+144.739728970 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.937241 4822 generic.go:334] "Generic (PLEG): container finished" podID="bab12393-6d6a-459b-8054-0934d99ffd0f" containerID="0768b91884490a397878459896c38ad086c3dbab7d2bd0fdcb7318369a10d68f" exitCode=0 Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.937364 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" event={"ID":"bab12393-6d6a-459b-8054-0934d99ffd0f","Type":"ContainerDied","Data":"0768b91884490a397878459896c38ad086c3dbab7d2bd0fdcb7318369a10d68f"} Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.976225 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8" event={"ID":"1ef340e9-1eee-41e6-802f-4ee1768acf70","Type":"ContainerStarted","Data":"a900f8e582770fb6260a006fd48f084a9e4872225c703f32584b93343be4df73"} Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.978851 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" event={"ID":"fee44223-9d68-4b77-807a-a806cd0e842f","Type":"ContainerStarted","Data":"6cb8e064544725fbb1f48bc1f96a69e52275438c622f04988e38135a0b488816"} Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.979992 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.993771 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kwtsp"] Dec 01 06:53:08 crc kubenswrapper[4822]: I1201 06:53:08.994706 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.001194 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9hgl8" event={"ID":"27ab4099-8ed2-4115-92f7-07109c42430f","Type":"ContainerStarted","Data":"c9d2839cb33495d3d5e483e7bdb1a3d4e187b135b86cb9b7843c7ecd4d306e57"} Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.003736 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" event={"ID":"1dd64fd6-9a41-4dd3-b8b8-e8fd1f27270b","Type":"ContainerStarted","Data":"3d483af8cbd9f72b68d9b5a5a343e48ac08548757aeaef660681d2cd3dfd85df"} Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.005413 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" event={"ID":"5485f8d6-493f-4b42-88a6-363043c13a90","Type":"ContainerStarted","Data":"e1d198ad55d4d95796d0e2c18424e17e7cf4654042df800a3761692ce7934e25"} Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.006137 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.008177 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" event={"ID":"6577ee23-bf0c-4452-adfa-40e6b6b0678c","Type":"ContainerStarted","Data":"7d3eca58a291b2aec51546025ae401b3c71e2d27518fbc90061dd1bbc8bf1c55"} Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.010360 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" event={"ID":"f3ee1c28-9784-4602-949b-c877f1c71adf","Type":"ContainerStarted","Data":"5a4203cc84894b77aa8de63b251c64480d45f1479793f5c7436c0b18b64470b9"} Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.013291 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" event={"ID":"4570adbe-e705-4cbe-aecb-b27a3b930048","Type":"ContainerStarted","Data":"d186d0335b30ae51960013dc7d24fc79a2bc3922a7341ddf5c0dde85280e39ed"} Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.013858 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.014804 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-f87b4" event={"ID":"03c39fe9-7006-45af-8c8f-6f970b3a9656","Type":"ContainerStarted","Data":"2281613e0b605e215ec7a706f88662dcd9053931eed02e2946c0c932615a72ef"} Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.015930 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-s9g5t" event={"ID":"837c103b-8351-4e7d-b00b-1b51640a1dd2","Type":"ContainerStarted","Data":"0d5ac4b62d27797d0fe2331e004cb6842ba1b1c7e9cef3eb4fafd3254dd59a4e"} Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.016871 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" event={"ID":"b52e759c-d09d-42b3-b2c9-a9760a69ef5e","Type":"ContainerStarted","Data":"73ff510d896527621a08870fe68833dc0ff98d844409c3e1d215b582e3556960"} Dec 01 06:53:09 crc 
kubenswrapper[4822]: I1201 06:53:09.018456 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-ptzkb" event={"ID":"839d69e8-399b-4c30-b64f-893327a389e7","Type":"ContainerStarted","Data":"50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247"} Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.020770 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5cxxd" event={"ID":"f140c3a7-856d-4cda-87f0-cf7aff2c1c7d","Type":"ContainerStarted","Data":"ed4600c28b7317175f5f6925e99bff839203f5ec11d71b50e6dd963e3309b044"} Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.033215 4822 patch_prober.go:28] interesting pod/downloads-7954f5f757-k9qc2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.033263 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-k9qc2" podUID="9170d425-7fae-40d9-aab0-7d2afbaa56e4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.033445 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:09 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:09 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:09 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.033461 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.034106 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:09 crc kubenswrapper[4822]: E1201 06:53:09.034517 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:09.534505957 +0000 UTC m=+144.855313643 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.050155 4822 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-5c4b8 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body= Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.050193 4822 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-nmx5d container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.050295 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" podUID="4570adbe-e705-4cbe-aecb-b27a3b930048" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.050217 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.072635 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-d64jc" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.131764 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.135833 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.136439 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvwh2\" (UniqueName: \"kubernetes.io/projected/a693d2fd-8db7-4699-ad03-e70175ead53c-kube-api-access-bvwh2\") pod \"certified-operators-kwtsp\" (UID: \"a693d2fd-8db7-4699-ad03-e70175ead53c\") " pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.136671 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a693d2fd-8db7-4699-ad03-e70175ead53c-utilities\") pod \"certified-operators-kwtsp\" (UID: \"a693d2fd-8db7-4699-ad03-e70175ead53c\") " 
pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.136914 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a693d2fd-8db7-4699-ad03-e70175ead53c-catalog-content\") pod \"certified-operators-kwtsp\" (UID: \"a693d2fd-8db7-4699-ad03-e70175ead53c\") " pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:53:09 crc kubenswrapper[4822]: E1201 06:53:09.137602 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:09.637587587 +0000 UTC m=+144.958395273 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.269275 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jrmvv"] Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.271357 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kwtsp"] Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.271464 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.274110 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvwh2\" (UniqueName: \"kubernetes.io/projected/a693d2fd-8db7-4699-ad03-e70175ead53c-kube-api-access-bvwh2\") pod \"certified-operators-kwtsp\" (UID: \"a693d2fd-8db7-4699-ad03-e70175ead53c\") " pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.274180 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a693d2fd-8db7-4699-ad03-e70175ead53c-utilities\") pod \"certified-operators-kwtsp\" (UID: \"a693d2fd-8db7-4699-ad03-e70175ead53c\") " pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.274228 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a693d2fd-8db7-4699-ad03-e70175ead53c-catalog-content\") pod \"certified-operators-kwtsp\" (UID: \"a693d2fd-8db7-4699-ad03-e70175ead53c\") " pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.274432 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:09 crc kubenswrapper[4822]: E1201 
06:53:09.274893 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:09.774877443 +0000 UTC m=+145.095685139 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.275643 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a693d2fd-8db7-4699-ad03-e70175ead53c-utilities\") pod \"certified-operators-kwtsp\" (UID: \"a693d2fd-8db7-4699-ad03-e70175ead53c\") " pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.279356 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a693d2fd-8db7-4699-ad03-e70175ead53c-catalog-content\") pod \"certified-operators-kwtsp\" (UID: \"a693d2fd-8db7-4699-ad03-e70175ead53c\") " pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.303758 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.332782 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jrmvv"] Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.383179 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-ptzkb" podStartSLOduration=124.383162313 podStartE2EDuration="2m4.383162313s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:09.382675248 +0000 UTC m=+144.703482934" watchObservedRunningTime="2025-12-01 06:53:09.383162313 +0000 UTC m=+144.703969999" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.385385 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.385552 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-utilities\") pod \"community-operators-jrmvv\" (UID: \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\") " pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.385590 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx488\" (UniqueName: 
\"kubernetes.io/projected/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-kube-api-access-mx488\") pod \"community-operators-jrmvv\" (UID: \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\") " pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.385652 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-catalog-content\") pod \"community-operators-jrmvv\" (UID: \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\") " pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:09 crc kubenswrapper[4822]: E1201 06:53:09.385772 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:09.885760222 +0000 UTC m=+145.206567908 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.406708 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5gkcw"] Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.407515 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvwh2\" (UniqueName: \"kubernetes.io/projected/a693d2fd-8db7-4699-ad03-e70175ead53c-kube-api-access-bvwh2\") pod \"certified-operators-kwtsp\" (UID: \"a693d2fd-8db7-4699-ad03-e70175ead53c\") " pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.432367 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.491482 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-catalog-content\") pod \"community-operators-jrmvv\" (UID: \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\") " pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.491639 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-catalog-content\") pod \"certified-operators-5gkcw\" (UID: \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\") " pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.491717 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-utilities\") pod \"certified-operators-5gkcw\" (UID: \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\") " pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.491796 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhpqf\" (UniqueName: \"kubernetes.io/projected/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-kube-api-access-fhpqf\") pod \"certified-operators-5gkcw\" (UID: \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\") " pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.491822 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-utilities\") pod \"community-operators-jrmvv\" (UID: \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\") " pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.491895 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx488\" (UniqueName: \"kubernetes.io/projected/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-kube-api-access-mx488\") pod \"community-operators-jrmvv\" (UID: \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\") " pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.492284 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.492128 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-catalog-content\") pod \"community-operators-jrmvv\" (UID: \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\") " pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.492229 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-utilities\") pod \"community-operators-jrmvv\" (UID: \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\") " pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:09 crc kubenswrapper[4822]: E1201 06:53:09.492686 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:09.99267448 +0000 UTC m=+145.313482166 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.586332 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5gkcw"] Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.596187 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.596339 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-catalog-content\") pod \"certified-operators-5gkcw\" (UID: \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\") " pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.596370 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-utilities\") pod \"certified-operators-5gkcw\" (UID: \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\") " pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.596420 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhpqf\" (UniqueName: \"kubernetes.io/projected/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-kube-api-access-fhpqf\") pod \"certified-operators-5gkcw\" (UID: \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\") " pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:09 crc kubenswrapper[4822]: E1201 06:53:09.596824 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:10.096810032 +0000 UTC m=+145.417617708 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.597179 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-utilities\") pod \"certified-operators-5gkcw\" (UID: \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\") " pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.597326 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-catalog-content\") pod \"certified-operators-5gkcw\" (UID: \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\") " pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.618084 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.622596 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2tczn"] Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.624004 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.645706 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhpqf\" (UniqueName: \"kubernetes.io/projected/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-kube-api-access-fhpqf\") pod \"certified-operators-5gkcw\" (UID: \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\") " pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.660292 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx488\" (UniqueName: \"kubernetes.io/projected/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-kube-api-access-mx488\") pod \"community-operators-jrmvv\" (UID: \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\") " pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.671883 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2tczn"] Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.700318 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.700657 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-utilities\") pod \"community-operators-2tczn\" (UID: \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\") " 
pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.700778 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-catalog-content\") pod \"community-operators-2tczn\" (UID: \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\") " pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.700865 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rf4s4\" (UniqueName: \"kubernetes.io/projected/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-kube-api-access-rf4s4\") pod \"community-operators-2tczn\" (UID: \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\") " pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:53:09 crc kubenswrapper[4822]: E1201 06:53:09.701277 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:10.201251314 +0000 UTC m=+145.522059000 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.781454 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" podStartSLOduration=124.781438685 podStartE2EDuration="2m4.781438685s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:09.780867777 +0000 UTC m=+145.101675463" watchObservedRunningTime="2025-12-01 06:53:09.781438685 +0000 UTC m=+145.102246371" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.803973 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.804205 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-utilities\") pod \"community-operators-2tczn\" (UID: \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\") " pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.804246 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-catalog-content\") pod \"community-operators-2tczn\" (UID: \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\") " pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 
06:53:09.804267 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rf4s4\" (UniqueName: \"kubernetes.io/projected/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-kube-api-access-rf4s4\") pod \"community-operators-2tczn\" (UID: \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\") " pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:53:09 crc kubenswrapper[4822]: E1201 06:53:09.804574 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:10.304490059 +0000 UTC m=+145.625297745 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.805299 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-catalog-content\") pod \"community-operators-2tczn\" (UID: \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\") " pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.805670 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-utilities\") pod \"community-operators-2tczn\" (UID: \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\") " pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.834298 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rf4s4\" (UniqueName: \"kubernetes.io/projected/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-kube-api-access-rf4s4\") pod \"community-operators-2tczn\" (UID: \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\") " pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.839601 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.848977 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.905327 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:09 crc kubenswrapper[4822]: E1201 06:53:09.905676 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-01 06:53:10.405665171 +0000 UTC m=+145.726472857 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:53:09 crc kubenswrapper[4822]: I1201 06:53:09.983270 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.014142 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:10 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:10 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:10 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.014210 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.015130 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:53:10 crc kubenswrapper[4822]: E1201 06:53:10.015467 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:10.515454627 +0000 UTC m=+145.836262313 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.080245 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" podStartSLOduration=125.080228667 podStartE2EDuration="2m5.080228667s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:10.079632888 +0000 UTC m=+145.400440574" watchObservedRunningTime="2025-12-01 06:53:10.080228667 +0000 UTC m=+145.401036353"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.089963 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9hgl8" podStartSLOduration=125.089946564 podStartE2EDuration="2m5.089946564s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:09.962033754 +0000 UTC m=+145.282841440" watchObservedRunningTime="2025-12-01 06:53:10.089946564 +0000 UTC m=+145.410754250"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.093269 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.093842 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.095166 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" event={"ID":"823bb36f-f372-441d-a483-85603bbac215","Type":"ContainerStarted","Data":"7d681d1cc94e305433e9e1b3a10eef1fc567e48de23d13f59b93d86f5f4a8f54"}
Dec 01 06:53:10 crc kubenswrapper[4822]: W1201 06:53:10.098991 4822 reflector.go:561] object-"openshift-kube-controller-manager"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-kube-controller-manager": no relationship found between node 'crc' and this object
Dec 01 06:53:10 crc kubenswrapper[4822]: E1201 06:53:10.099023 4822 reflector.go:158] "Unhandled Error" err="object-\"openshift-kube-controller-manager\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-kube-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 01 06:53:10 crc kubenswrapper[4822]: W1201 06:53:10.099094 4822 reflector.go:561] object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n": failed to list *v1.Secret: secrets "installer-sa-dockercfg-kjl2n" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-kube-controller-manager": no relationship found between node 'crc' and this object
Dec 01 06:53:10 crc kubenswrapper[4822]: E1201 06:53:10.099132 4822 reflector.go:158] "Unhandled Error" err="object-\"openshift-kube-controller-manager\"/\"installer-sa-dockercfg-kjl2n\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"installer-sa-dockercfg-kjl2n\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-kube-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.109747 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" event={"ID":"6577ee23-bf0c-4452-adfa-40e6b6b0678c","Type":"ContainerStarted","Data":"fcbfac31d8e8749b5df69c81a61b58d7aef638e12895b4f8c3ff5d1ca37290b2"}
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.116776 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:10 crc kubenswrapper[4822]: E1201 06:53:10.117223 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:10.617206717 +0000 UTC m=+145.938014403 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.125682 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" event={"ID":"bab12393-6d6a-459b-8054-0934d99ffd0f","Type":"ContainerStarted","Data":"8113cb405d7d68dcc7047d452f1f0368db3e3d91d99d90b6caf3a17ed87e34f8"}
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.135406 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" event={"ID":"0416c999-c003-42ff-8c6e-30357b23975d","Type":"ContainerStarted","Data":"c8ddeea38e4790edaa03d08a87ba0723ddee86586daaaef2719328e4216b89a2"}
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.135585 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.137673 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8" event={"ID":"1ef340e9-1eee-41e6-802f-4ee1768acf70","Type":"ContainerStarted","Data":"59a9d1874f230e73acdb00a585ecc1f100dcfadc83ba42cab0cdf265f87c9a4d"}
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.155136 4822 generic.go:334] "Generic (PLEG): container finished" podID="2c1f05f0-aa0d-48fa-a6de-04c19783f5c2" containerID="0919d5049df538c888b405979ba43e6793a4eeaf631c4b79997d1d08d77387bb" exitCode=0
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.155245 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" event={"ID":"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2","Type":"ContainerDied","Data":"0919d5049df538c888b405979ba43e6793a4eeaf631c4b79997d1d08d77387bb"}
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.158243 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-f87b4" event={"ID":"03c39fe9-7006-45af-8c8f-6f970b3a9656","Type":"ContainerStarted","Data":"9322f38d58e37199af972fc5294d325ed57df6a6c7c1a53327557770c48dcbad"}
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.158811 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-f87b4"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.185886 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-s9g5t" event={"ID":"837c103b-8351-4e7d-b00b-1b51640a1dd2","Type":"ContainerStarted","Data":"631cb3695d43a4cb9fe033a451a4fcfa1981d95b48decb21f565e8dbf0409728"}
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.187277 4822 patch_prober.go:28] interesting pod/downloads-7954f5f757-k9qc2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body=
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.187317 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-k9qc2" podUID="9170d425-7fae-40d9-aab0-7d2afbaa56e4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.189019 4822 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-5c4b8 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body=
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.189073 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.221163 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.221330 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cf6c25b-660d-4ca2-aabf-c02779c20da9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1cf6c25b-660d-4ca2-aabf-c02779c20da9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.221445 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cf6c25b-660d-4ca2-aabf-c02779c20da9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1cf6c25b-660d-4ca2-aabf-c02779c20da9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 01 06:53:10 crc kubenswrapper[4822]: E1201 06:53:10.222257 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:10.722243387 +0000 UTC m=+146.043051073 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.291943 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.323032 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.323182 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cf6c25b-660d-4ca2-aabf-c02779c20da9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1cf6c25b-660d-4ca2-aabf-c02779c20da9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.323306 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cf6c25b-660d-4ca2-aabf-c02779c20da9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1cf6c25b-660d-4ca2-aabf-c02779c20da9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 01 06:53:10 crc kubenswrapper[4822]: E1201 06:53:10.325261 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:10.825249525 +0000 UTC m=+146.146057211 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.325481 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cf6c25b-660d-4ca2-aabf-c02779c20da9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1cf6c25b-660d-4ca2-aabf-c02779c20da9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.343093 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.436671 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:10 crc kubenswrapper[4822]: E1201 06:53:10.437479 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:10.937460074 +0000 UTC m=+146.258267760 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.439270 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xqgh" podStartSLOduration=125.439246299 podStartE2EDuration="2m5.439246299s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:10.417377221 +0000 UTC m=+145.738184917" watchObservedRunningTime="2025-12-01 06:53:10.439246299 +0000 UTC m=+145.760053975"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.539884 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:10 crc kubenswrapper[4822]: E1201 06:53:10.541064 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:11.04104434 +0000 UTC m=+146.361852026 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.573715 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-ddprm" podStartSLOduration=125.573700388 podStartE2EDuration="2m5.573700388s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:10.511082635 +0000 UTC m=+145.831890321" watchObservedRunningTime="2025-12-01 06:53:10.573700388 +0000 UTC m=+145.894508064"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.622442 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" podStartSLOduration=126.622424447 podStartE2EDuration="2m6.622424447s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:10.575766911 +0000 UTC m=+145.896574597" watchObservedRunningTime="2025-12-01 06:53:10.622424447 +0000 UTC m=+145.943232133"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.643302 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:10 crc kubenswrapper[4822]: E1201 06:53:10.644110 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:11.1440935 +0000 UTC m=+146.464901186 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.714484 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-f87b4" podStartSLOduration=10.71445213 podStartE2EDuration="10.71445213s" podCreationTimestamp="2025-12-01 06:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:10.71182537 +0000 UTC m=+146.032633056" watchObservedRunningTime="2025-12-01 06:53:10.71445213 +0000 UTC m=+146.035259816"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.758087 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:10 crc kubenswrapper[4822]: E1201 06:53:10.758779 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:11.258749274 +0000 UTC m=+146.579556960 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.826466 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4thh8" podStartSLOduration=126.826447873 podStartE2EDuration="2m6.826447873s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:10.799237861 +0000 UTC m=+146.120045547" watchObservedRunningTime="2025-12-01 06:53:10.826447873 +0000 UTC m=+146.147255559"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.826985 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zrxm2" podStartSLOduration=125.826981189 podStartE2EDuration="2m5.826981189s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:10.82537159 +0000 UTC m=+146.146179276" watchObservedRunningTime="2025-12-01 06:53:10.826981189 +0000 UTC m=+146.147788875"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.859514 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:10 crc kubenswrapper[4822]: E1201 06:53:10.860007 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:11.359981658 +0000 UTC m=+146.680789344 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.882652 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tv5ck"]
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.883874 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tv5ck"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.888756 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.901774 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-s9g5t" podStartSLOduration=125.901760615 podStartE2EDuration="2m5.901760615s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:10.900022201 +0000 UTC m=+146.220829887" watchObservedRunningTime="2025-12-01 06:53:10.901760615 +0000 UTC m=+146.222568301"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.920519 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tv5ck"]
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.963910 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdrs2\" (UniqueName: \"kubernetes.io/projected/d9639beb-e6eb-42eb-9937-1b89a6be6100-kube-api-access-pdrs2\") pod \"redhat-marketplace-tv5ck\" (UID: \"d9639beb-e6eb-42eb-9937-1b89a6be6100\") " pod="openshift-marketplace/redhat-marketplace-tv5ck"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.964003 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9639beb-e6eb-42eb-9937-1b89a6be6100-utilities\") pod \"redhat-marketplace-tv5ck\" (UID: \"d9639beb-e6eb-42eb-9937-1b89a6be6100\") " pod="openshift-marketplace/redhat-marketplace-tv5ck"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.964035 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:10 crc kubenswrapper[4822]: I1201 06:53:10.964054 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9639beb-e6eb-42eb-9937-1b89a6be6100-catalog-content\") pod \"redhat-marketplace-tv5ck\" (UID: \"d9639beb-e6eb-42eb-9937-1b89a6be6100\") " pod="openshift-marketplace/redhat-marketplace-tv5ck"
Dec 01 06:53:10 crc kubenswrapper[4822]: E1201 06:53:10.964502 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:11.464486371 +0000 UTC m=+146.785294057 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.004747 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 06:53:11 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld
Dec 01 06:53:11 crc kubenswrapper[4822]: [+]process-running ok
Dec 01 06:53:11 crc kubenswrapper[4822]: healthz check failed
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.004800 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.048446 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jrmvv"]
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.048501 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kwtsp"]
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.066150 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.066347 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9639beb-e6eb-42eb-9937-1b89a6be6100-utilities\") pod \"redhat-marketplace-tv5ck\" (UID: \"d9639beb-e6eb-42eb-9937-1b89a6be6100\") " pod="openshift-marketplace/redhat-marketplace-tv5ck"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.066381 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9639beb-e6eb-42eb-9937-1b89a6be6100-catalog-content\") pod \"redhat-marketplace-tv5ck\" (UID: \"d9639beb-e6eb-42eb-9937-1b89a6be6100\") " pod="openshift-marketplace/redhat-marketplace-tv5ck"
Dec 01 06:53:11 crc kubenswrapper[4822]: E1201 06:53:11.066540 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:11.566502728 +0000 UTC m=+146.887310414 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.066809 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9639beb-e6eb-42eb-9937-1b89a6be6100-catalog-content\") pod \"redhat-marketplace-tv5ck\" (UID: \"d9639beb-e6eb-42eb-9937-1b89a6be6100\") " pod="openshift-marketplace/redhat-marketplace-tv5ck"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.067047 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9639beb-e6eb-42eb-9937-1b89a6be6100-utilities\") pod \"redhat-marketplace-tv5ck\" (UID: \"d9639beb-e6eb-42eb-9937-1b89a6be6100\") " pod="openshift-marketplace/redhat-marketplace-tv5ck"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.079003 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdrs2\" (UniqueName: \"kubernetes.io/projected/d9639beb-e6eb-42eb-9937-1b89a6be6100-kube-api-access-pdrs2\") pod \"redhat-marketplace-tv5ck\" (UID: \"d9639beb-e6eb-42eb-9937-1b89a6be6100\") " pod="openshift-marketplace/redhat-marketplace-tv5ck"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.136113 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdrs2\" (UniqueName: \"kubernetes.io/projected/d9639beb-e6eb-42eb-9937-1b89a6be6100-kube-api-access-pdrs2\") pod \"redhat-marketplace-tv5ck\" (UID: \"d9639beb-e6eb-42eb-9937-1b89a6be6100\") " pod="openshift-marketplace/redhat-marketplace-tv5ck"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.188377 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:11 crc kubenswrapper[4822]: E1201 06:53:11.188769 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:11.688757545 +0000 UTC m=+147.009565231 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.244001 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tv5ck"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.267343 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" event={"ID":"823bb36f-f372-441d-a483-85603bbac215","Type":"ContainerStarted","Data":"2f511c15d1f3e29b08faa0e2dd945eb5a4db3248f0c7c4dba5f883735cc35cd2"}
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.271271 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ghqm7"]
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.279035 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ghqm7"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.293091 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:11 crc kubenswrapper[4822]: E1201 06:53:11.293428 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:11.793413583 +0000 UTC m=+147.114221269 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.309182 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" event={"ID":"bab12393-6d6a-459b-8054-0934d99ffd0f","Type":"ContainerStarted","Data":"d5d54518d52c090f2c5283aefa8bbc616ed673717775f5c436a177319738a722"}
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.328815 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ghqm7"]
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.364128 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2tczn"]
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.366065 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.374871 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" podStartSLOduration=127.374854702 podStartE2EDuration="2m7.374854702s" podCreationTimestamp="2025-12-01 06:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:11.368601851 +0000 UTC m=+146.689409547" watchObservedRunningTime="2025-12-01 06:53:11.374854702 +0000 UTC m=+146.695662378"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.378902 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kwtsp" event={"ID":"a693d2fd-8db7-4699-ad03-e70175ead53c","Type":"ContainerStarted","Data":"9b55860dde902580635464244db736952dc13cb439db030956facb88205d9d63"}
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.382460 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cf6c25b-660d-4ca2-aabf-c02779c20da9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1cf6c25b-660d-4ca2-aabf-c02779c20da9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.404833 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.405152 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdbcr\" (UniqueName: \"kubernetes.io/projected/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-kube-api-access-mdbcr\") pod \"redhat-marketplace-ghqm7\" (UID: \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\") " pod="openshift-marketplace/redhat-marketplace-ghqm7"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.405256 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-catalog-content\") pod \"redhat-marketplace-ghqm7\" (UID: \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\") " pod="openshift-marketplace/redhat-marketplace-ghqm7"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.405363 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-utilities\") pod \"redhat-marketplace-ghqm7\" (UID: \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\") " pod="openshift-marketplace/redhat-marketplace-ghqm7"
Dec 01 06:53:11 crc kubenswrapper[4822]: E1201 06:53:11.407503 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:11.90748949 +0000 UTC m=+147.228297176 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.425990 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5gkcw"]
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.429883 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jrmvv" event={"ID":"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd","Type":"ContainerStarted","Data":"1e060892ffe760f012bdce019da91ce0acdd27d47b04a5994958391d3e6f7035"}
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.455113 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.507061 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:11 crc kubenswrapper[4822]: E1201 06:53:11.507369 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:12.007326471 +0000 UTC m=+147.328134157 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.507567 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.507683 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdbcr\" (UniqueName: \"kubernetes.io/projected/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-kube-api-access-mdbcr\") pod \"redhat-marketplace-ghqm7\" (UID: \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\") " pod="openshift-marketplace/redhat-marketplace-ghqm7"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.507780 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-catalog-content\") pod \"redhat-marketplace-ghqm7\" (UID: \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\") " pod="openshift-marketplace/redhat-marketplace-ghqm7"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.507871 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-utilities\") pod \"redhat-marketplace-ghqm7\" (UID: \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\") " pod="openshift-marketplace/redhat-marketplace-ghqm7"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.508317 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-utilities\") pod \"redhat-marketplace-ghqm7\" (UID: \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\") " pod="openshift-marketplace/redhat-marketplace-ghqm7"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.508650 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-catalog-content\") pod \"redhat-marketplace-ghqm7\" (UID: \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\") " pod="openshift-marketplace/redhat-marketplace-ghqm7"
Dec 01 06:53:11 crc kubenswrapper[4822]: E1201 06:53:11.508860 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:12.008837317 +0000 UTC m=+147.329645073 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.565541 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdbcr\" (UniqueName: \"kubernetes.io/projected/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-kube-api-access-mdbcr\") pod \"redhat-marketplace-ghqm7\" (UID: \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\") " pod="openshift-marketplace/redhat-marketplace-ghqm7"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.595983 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.615125 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:11 crc kubenswrapper[4822]: E1201 06:53:11.616842 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:53:12.116823768 +0000 UTC m=+147.437631454 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.626260 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.626327 4822 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.635750 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ghqm7"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.694770 4822 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-01T06:53:11.626366819Z","Handler":null,"Name":""}
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.716757 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:11 crc kubenswrapper[4822]: E1201 06:53:11.717166 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:53:12.217154974 +0000 UTC m=+147.537962650 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9f7zg" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.752803 4822 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.752854 4822 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.817572 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.823041 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.924312 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.958421 4822 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.958464 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.985692 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 06:53:11 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld
Dec 01 06:53:11 crc kubenswrapper[4822]: [+]process-running ok
Dec 01 06:53:11 crc kubenswrapper[4822]: healthz check failed
Dec 01 06:53:11 crc kubenswrapper[4822]: I1201 06:53:11.985926 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.026541 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.026594 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.027660 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.040343 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.071495 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tv5ck"]
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.079823 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.122477 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.130373 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.130463 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.138396 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.141976 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.174876 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9f7zg\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.187680 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.191470 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.191601 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.232412 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-secret-volume\") pod \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\" (UID: \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\") "
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.232582 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk59w\" (UniqueName: \"kubernetes.io/projected/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-kube-api-access-lk59w\") pod \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\" (UID: \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\") "
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.232607 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-config-volume\") pod \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\" (UID: \"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2\") "
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.242773 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-config-volume" (OuterVolumeSpecName: "config-volume") pod "2c1f05f0-aa0d-48fa-a6de-04c19783f5c2" (UID: "2c1f05f0-aa0d-48fa-a6de-04c19783f5c2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.266068 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nbmgc"]
Dec 01 06:53:12 crc kubenswrapper[4822]: E1201 06:53:12.292743 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c1f05f0-aa0d-48fa-a6de-04c19783f5c2" containerName="collect-profiles"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.292771 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c1f05f0-aa0d-48fa-a6de-04c19783f5c2" containerName="collect-profiles"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.292935 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c1f05f0-aa0d-48fa-a6de-04c19783f5c2" containerName="collect-profiles"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.294122 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbmgc"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.295729 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2c1f05f0-aa0d-48fa-a6de-04c19783f5c2" (UID: "2c1f05f0-aa0d-48fa-a6de-04c19783f5c2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.298458 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.304074 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-kube-api-access-lk59w" (OuterVolumeSpecName: "kube-api-access-lk59w") pod "2c1f05f0-aa0d-48fa-a6de-04c19783f5c2" (UID: "2c1f05f0-aa0d-48fa-a6de-04c19783f5c2"). InnerVolumeSpecName "kube-api-access-lk59w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.310277 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nbmgc"]
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.337950 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk59w\" (UniqueName: \"kubernetes.io/projected/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-kube-api-access-lk59w\") on node \"crc\" DevicePath \"\""
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.337971 4822 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-config-volume\") on node \"crc\" DevicePath \"\""
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.337981 4822 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.446117 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c43bed42-ab14-48f1-a79d-dca61de1b6f1-utilities\") pod \"redhat-operators-nbmgc\" (UID: \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\") " pod="openshift-marketplace/redhat-operators-nbmgc"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.446733 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsjhx\" (UniqueName: \"kubernetes.io/projected/c43bed42-ab14-48f1-a79d-dca61de1b6f1-kube-api-access-wsjhx\") pod \"redhat-operators-nbmgc\" (UID: \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\") " pod="openshift-marketplace/redhat-operators-nbmgc"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.446782 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c43bed42-ab14-48f1-a79d-dca61de1b6f1-catalog-content\") pod \"redhat-operators-nbmgc\" (UID: \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\") " pod="openshift-marketplace/redhat-operators-nbmgc"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.471137 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.539352 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7" event={"ID":"2c1f05f0-aa0d-48fa-a6de-04c19783f5c2","Type":"ContainerDied","Data":"f4036dfbc893495378008daf7e30f7b32311cb13348cd809b7cea75f47f043f7"}
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.539392 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4036dfbc893495378008daf7e30f7b32311cb13348cd809b7cea75f47f043f7"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.539469 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.542513 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.542820 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.549823 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c43bed42-ab14-48f1-a79d-dca61de1b6f1-catalog-content\") pod \"redhat-operators-nbmgc\" (UID: \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\") " pod="openshift-marketplace/redhat-operators-nbmgc"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.549884 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c43bed42-ab14-48f1-a79d-dca61de1b6f1-utilities\") pod \"redhat-operators-nbmgc\" (UID: \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\") " pod="openshift-marketplace/redhat-operators-nbmgc"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.549936 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsjhx\" (UniqueName: \"kubernetes.io/projected/c43bed42-ab14-48f1-a79d-dca61de1b6f1-kube-api-access-wsjhx\") pod \"redhat-operators-nbmgc\" (UID: \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\") " pod="openshift-marketplace/redhat-operators-nbmgc"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.550535 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c43bed42-ab14-48f1-a79d-dca61de1b6f1-catalog-content\") pod \"redhat-operators-nbmgc\" (UID: \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\") " pod="openshift-marketplace/redhat-operators-nbmgc"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.550757 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c43bed42-ab14-48f1-a79d-dca61de1b6f1-utilities\") pod \"redhat-operators-nbmgc\" (UID: \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\") " pod="openshift-marketplace/redhat-operators-nbmgc"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.575134 4822 generic.go:334] "Generic (PLEG): container finished" podID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" containerID="e9bfbf98a7107c94ce6c8d38c9749d7415e3783b446b67f9d28b3901ba693319" exitCode=0
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.575300 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jrmvv" event={"ID":"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd","Type":"ContainerDied","Data":"e9bfbf98a7107c94ce6c8d38c9749d7415e3783b446b67f9d28b3901ba693319"}
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.578888 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.585731 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsjhx\" (UniqueName: \"kubernetes.io/projected/c43bed42-ab14-48f1-a79d-dca61de1b6f1-kube-api-access-wsjhx\") pod \"redhat-operators-nbmgc\" (UID: \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\") " pod="openshift-marketplace/redhat-operators-nbmgc"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.618673 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ghqm7"]
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.631873 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1cf6c25b-660d-4ca2-aabf-c02779c20da9","Type":"ContainerStarted","Data":"aedc8ae24e05eba5217b0ff95e37b4da49b9828183312a5ff8c510a649849efb"}
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.672599 4822 generic.go:334] "Generic (PLEG): container finished" podID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" containerID="88fc4808afc676bdf3136aa1b6e6fe9c8a9b89885abbdfb3f66af51cb413ca5c" exitCode=0
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.672999 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tczn" event={"ID":"05a1348c-7ab2-414b-a0ab-2e48808a2d4d","Type":"ContainerDied","Data":"88fc4808afc676bdf3136aa1b6e6fe9c8a9b89885abbdfb3f66af51cb413ca5c"}
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.673028 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tczn" event={"ID":"05a1348c-7ab2-414b-a0ab-2e48808a2d4d","Type":"ContainerStarted","Data":"40039e8377920226b1ad8d27fb9558cc8393c45a4a44a3951f932f344e03226e"}
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.681764 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-smgxk"]
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.682791 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-smgxk"
Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.684195 4822 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-operators-nbmgc" Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.704599 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" event={"ID":"823bb36f-f372-441d-a483-85603bbac215","Type":"ContainerStarted","Data":"ca844fdb33450c96242a55755ea6082f9040a3a05c9bfcd71c1a97bb7a8f68ea"} Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.710829 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tv5ck" event={"ID":"d9639beb-e6eb-42eb-9937-1b89a6be6100","Type":"ContainerStarted","Data":"04dfa112bfe9415f3beb4848d2274a5647756a55f1f70461db5399d706dabf05"} Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.730851 4822 generic.go:334] "Generic (PLEG): container finished" podID="dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" containerID="bd65cca94353e87e69ecc107010f89211facc1824e260582cf587b5aede9e7a0" exitCode=0 Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.730946 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5gkcw" event={"ID":"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9","Type":"ContainerDied","Data":"bd65cca94353e87e69ecc107010f89211facc1824e260582cf587b5aede9e7a0"} Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.738396 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5gkcw" event={"ID":"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9","Type":"ContainerStarted","Data":"434d2f0fd4bdf21652187405bb68b668ffac02c703bc03b9a8cd50ddfa14fcfc"} Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.756143 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.756186 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.784359 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-smgxk"] Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.786619 4822 generic.go:334] "Generic (PLEG): container finished" podID="a693d2fd-8db7-4699-ad03-e70175ead53c" containerID="fc4c7079fd9e958feef23dd457e3b34dda5f60a928a137a5a5a82463a56e4321" exitCode=0 Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.786862 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kwtsp" event={"ID":"a693d2fd-8db7-4699-ad03-e70175ead53c","Type":"ContainerDied","Data":"fc4c7079fd9e958feef23dd457e3b34dda5f60a928a137a5a5a82463a56e4321"} Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.859061 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-utilities\") pod \"redhat-operators-smgxk\" (UID: \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\") " pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.859120 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h6cn\" (UniqueName: \"kubernetes.io/projected/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-kube-api-access-4h6cn\") pod \"redhat-operators-smgxk\" (UID: \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\") " 
pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.859144 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-catalog-content\") pod \"redhat-operators-smgxk\" (UID: \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\") " pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.961182 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-utilities\") pod \"redhat-operators-smgxk\" (UID: \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\") " pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.961614 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h6cn\" (UniqueName: \"kubernetes.io/projected/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-kube-api-access-4h6cn\") pod \"redhat-operators-smgxk\" (UID: \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\") " pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.961653 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-catalog-content\") pod \"redhat-operators-smgxk\" (UID: \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\") " pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.964218 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-utilities\") pod \"redhat-operators-smgxk\" (UID: \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\") " pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.972972 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-catalog-content\") pod \"redhat-operators-smgxk\" (UID: \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\") " pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.977258 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.994673 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:12 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:12 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:12 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:12 crc kubenswrapper[4822]: I1201 06:53:12.994732 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.027605 4822 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h6cn\" (UniqueName: \"kubernetes.io/projected/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-kube-api-access-4h6cn\") pod \"redhat-operators-smgxk\" (UID: \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\") " pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.081062 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.329527 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-9f7zg"] Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.394762 4822 patch_prober.go:28] interesting pod/apiserver-76f77b778f-t4mcw container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 01 06:53:13 crc kubenswrapper[4822]: [+]log ok Dec 01 06:53:13 crc kubenswrapper[4822]: [+]etcd ok Dec 01 06:53:13 crc kubenswrapper[4822]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 01 06:53:13 crc kubenswrapper[4822]: [+]poststarthook/generic-apiserver-start-informers ok Dec 01 06:53:13 crc kubenswrapper[4822]: [+]poststarthook/max-in-flight-filter ok Dec 01 06:53:13 crc kubenswrapper[4822]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 01 06:53:13 crc kubenswrapper[4822]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 01 06:53:13 crc kubenswrapper[4822]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 01 06:53:13 crc kubenswrapper[4822]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 01 06:53:13 crc kubenswrapper[4822]: [+]poststarthook/project.openshift.io-projectcache ok Dec 01 06:53:13 crc kubenswrapper[4822]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 01 06:53:13 crc kubenswrapper[4822]: [+]poststarthook/openshift.io-startinformers ok Dec 01 06:53:13 crc kubenswrapper[4822]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 01 06:53:13 crc kubenswrapper[4822]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 01 06:53:13 crc kubenswrapper[4822]: livez check failed Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.395169 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" podUID="bab12393-6d6a-459b-8054-0934d99ffd0f" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.504042 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nbmgc"] Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.693326 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-smgxk"] Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.808279 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" event={"ID":"ba3ade7b-ad1c-4f60-9cde-f7a198336912","Type":"ContainerStarted","Data":"666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.808352 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" 
event={"ID":"ba3ade7b-ad1c-4f60-9cde-f7a198336912","Type":"ContainerStarted","Data":"ec37137fd117fc16fe56e25baf244ff6b523f75bd244885657fcb647df0827b1"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.808412 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.811380 4822 generic.go:334] "Generic (PLEG): container finished" podID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" containerID="b322e5672e69c572b77dc69834d652fd90053e0a7003f1b89533525f1c618052" exitCode=0 Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.811575 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ghqm7" event={"ID":"d0d1fe31-3d9d-4422-9b5b-d73653be78c8","Type":"ContainerDied","Data":"b322e5672e69c572b77dc69834d652fd90053e0a7003f1b89533525f1c618052"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.811606 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ghqm7" event={"ID":"d0d1fe31-3d9d-4422-9b5b-d73653be78c8","Type":"ContainerStarted","Data":"9c70fca2c35a7113a9096ab928a2727a5d7d9cac154660c6f5edcdd219ec52b2"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.815283 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"945dc46af0ee3d6da32cfa64441b79c457a9db2a9c71b7ec6a05e316b5dbdf7d"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.815316 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"2459f3e3c3aa8bd7af273fd9ff7a19b648afc74be34566b2ff7652cea90ce83e"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.817143 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"bf038b878c5ebf738da2898dec6f3b66bb030ee46c126551b9f7cff184130bf4"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.817179 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"97c13a704b27aea1810126c7b891207bdd30521631e202455f9b7c50d1767976"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.817374 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.822130 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" event={"ID":"823bb36f-f372-441d-a483-85603bbac215","Type":"ContainerStarted","Data":"504333a4a5657131eb334459adb96ef1e515f5d13e07bbdc92e7f3e67eb53584"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.833417 4822 generic.go:334] "Generic (PLEG): container finished" podID="d9639beb-e6eb-42eb-9937-1b89a6be6100" containerID="7beaa652fec2d1d1d2de74195c6601d1bb04000e4aec068ffc67edc2ef10830c" exitCode=0 Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.833484 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tv5ck" 
event={"ID":"d9639beb-e6eb-42eb-9937-1b89a6be6100","Type":"ContainerDied","Data":"7beaa652fec2d1d1d2de74195c6601d1bb04000e4aec068ffc67edc2ef10830c"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.846668 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"09714ebae0e7c75558f1e60e30159eb433d14f2ea8af7bb1c6257252ffe03034"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.846727 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"32f3b2394083cea81aa520cb4da7949388e5ac1460a44457e9439d80dfc429f4"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.849844 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1cf6c25b-660d-4ca2-aabf-c02779c20da9","Type":"ContainerStarted","Data":"f4298755815f70d757c7f97cc89f570daa240e74dc5b6d161216ad0cbf295862"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.850473 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" podStartSLOduration=128.850461153 podStartE2EDuration="2m8.850461153s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:13.841052846 +0000 UTC m=+149.161860542" watchObservedRunningTime="2025-12-01 06:53:13.850461153 +0000 UTC m=+149.171268839" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.856944 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmgc" event={"ID":"c43bed42-ab14-48f1-a79d-dca61de1b6f1","Type":"ContainerStarted","Data":"dd661be7c1003195c88f67523188446b570c178503049e9bc86715f87f1a6314"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.860834 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smgxk" event={"ID":"4eaba10b-a8b4-4079-a55c-0807ca25b2e4","Type":"ContainerStarted","Data":"e05e3cb2cbe27804f9145962a729541c76c2e65da4b7a90577c3b3f24c3a9b58"} Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.898250 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.898310 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.900769 4822 patch_prober.go:28] interesting pod/console-f9d7485db-ptzkb container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.900889 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-ptzkb" podUID="839d69e8-399b-4c30-b64f-893327a389e7" containerName="console" probeResult="failure" output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.920358 4822 patch_prober.go:28] 
interesting pod/downloads-7954f5f757-k9qc2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.920437 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-k9qc2" podUID="9170d425-7fae-40d9-aab0-7d2afbaa56e4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.922955 4822 patch_prober.go:28] interesting pod/downloads-7954f5f757-k9qc2 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.923033 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-k9qc2" podUID="9170d425-7fae-40d9-aab0-7d2afbaa56e4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.940374 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-6mnpz" podStartSLOduration=13.94035553 podStartE2EDuration="13.94035553s" podCreationTimestamp="2025-12-01 06:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:13.937369129 +0000 UTC m=+149.258176815" watchObservedRunningTime="2025-12-01 06:53:13.94035553 +0000 UTC m=+149.261163216" Dec 01 06:53:13 crc kubenswrapper[4822]: I1201 06:53:13.981432 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:14 crc kubenswrapper[4822]: I1201 06:53:14.025159 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:14 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:14 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:14 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:14 crc kubenswrapper[4822]: I1201 06:53:14.025209 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:14 crc kubenswrapper[4822]: I1201 06:53:14.718127 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dkqzc" Dec 01 06:53:14 crc kubenswrapper[4822]: I1201 06:53:14.868149 4822 generic.go:334] "Generic (PLEG): container finished" podID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" containerID="fff687c97ff61237d4d76d6629b3881d26d425c8877bc3d925676a781f9cbcc7" exitCode=0 Dec 01 06:53:14 crc kubenswrapper[4822]: I1201 06:53:14.868214 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmgc" 
event={"ID":"c43bed42-ab14-48f1-a79d-dca61de1b6f1","Type":"ContainerDied","Data":"fff687c97ff61237d4d76d6629b3881d26d425c8877bc3d925676a781f9cbcc7"} Dec 01 06:53:14 crc kubenswrapper[4822]: I1201 06:53:14.869762 4822 generic.go:334] "Generic (PLEG): container finished" podID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" containerID="d0cc710d4f49c7bc4b33be110c854d6067b258d7cdf4004d6f813c8e7cee3068" exitCode=0 Dec 01 06:53:14 crc kubenswrapper[4822]: I1201 06:53:14.869811 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smgxk" event={"ID":"4eaba10b-a8b4-4079-a55c-0807ca25b2e4","Type":"ContainerDied","Data":"d0cc710d4f49c7bc4b33be110c854d6067b258d7cdf4004d6f813c8e7cee3068"} Dec 01 06:53:14 crc kubenswrapper[4822]: I1201 06:53:14.873499 4822 generic.go:334] "Generic (PLEG): container finished" podID="1cf6c25b-660d-4ca2-aabf-c02779c20da9" containerID="f4298755815f70d757c7f97cc89f570daa240e74dc5b6d161216ad0cbf295862" exitCode=0 Dec 01 06:53:14 crc kubenswrapper[4822]: I1201 06:53:14.874011 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1cf6c25b-660d-4ca2-aabf-c02779c20da9","Type":"ContainerDied","Data":"f4298755815f70d757c7f97cc89f570daa240e74dc5b6d161216ad0cbf295862"} Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.004522 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:15 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:15 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:15 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.004622 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.394183 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.514331 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cf6c25b-660d-4ca2-aabf-c02779c20da9-kube-api-access\") pod \"1cf6c25b-660d-4ca2-aabf-c02779c20da9\" (UID: \"1cf6c25b-660d-4ca2-aabf-c02779c20da9\") " Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.514494 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cf6c25b-660d-4ca2-aabf-c02779c20da9-kubelet-dir\") pod \"1cf6c25b-660d-4ca2-aabf-c02779c20da9\" (UID: \"1cf6c25b-660d-4ca2-aabf-c02779c20da9\") " Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.514645 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cf6c25b-660d-4ca2-aabf-c02779c20da9-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1cf6c25b-660d-4ca2-aabf-c02779c20da9" (UID: "1cf6c25b-660d-4ca2-aabf-c02779c20da9"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.515014 4822 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cf6c25b-660d-4ca2-aabf-c02779c20da9-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.523410 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cf6c25b-660d-4ca2-aabf-c02779c20da9-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1cf6c25b-660d-4ca2-aabf-c02779c20da9" (UID: "1cf6c25b-660d-4ca2-aabf-c02779c20da9"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.616947 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cf6c25b-660d-4ca2-aabf-c02779c20da9-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.886360 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1cf6c25b-660d-4ca2-aabf-c02779c20da9","Type":"ContainerDied","Data":"aedc8ae24e05eba5217b0ff95e37b4da49b9828183312a5ff8c510a649849efb"} Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.886404 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aedc8ae24e05eba5217b0ff95e37b4da49b9828183312a5ff8c510a649849efb" Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.886478 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.987411 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:15 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:15 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:15 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.987480 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:15 crc kubenswrapper[4822]: I1201 06:53:15.995676 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 06:53:16 crc kubenswrapper[4822]: I1201 06:53:16.937514 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 01 06:53:16 crc kubenswrapper[4822]: E1201 06:53:16.937846 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cf6c25b-660d-4ca2-aabf-c02779c20da9" containerName="pruner" Dec 01 06:53:16 crc kubenswrapper[4822]: I1201 06:53:16.937859 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cf6c25b-660d-4ca2-aabf-c02779c20da9" containerName="pruner" Dec 01 06:53:16 crc kubenswrapper[4822]: I1201 06:53:16.937982 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cf6c25b-660d-4ca2-aabf-c02779c20da9" containerName="pruner" Dec 01 
06:53:16 crc kubenswrapper[4822]: I1201 06:53:16.938977 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:53:16 crc kubenswrapper[4822]: I1201 06:53:16.944008 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 06:53:16 crc kubenswrapper[4822]: I1201 06:53:16.944192 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 06:53:16 crc kubenswrapper[4822]: I1201 06:53:16.971846 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 01 06:53:16 crc kubenswrapper[4822]: I1201 06:53:16.985784 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:16 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:16 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:16 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:16 crc kubenswrapper[4822]: I1201 06:53:16.985885 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:17 crc kubenswrapper[4822]: I1201 06:53:17.041896 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33a7bca7-b1da-40e2-8c24-3d63a860cd19-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"33a7bca7-b1da-40e2-8c24-3d63a860cd19\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:53:17 crc kubenswrapper[4822]: I1201 06:53:17.042029 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33a7bca7-b1da-40e2-8c24-3d63a860cd19-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"33a7bca7-b1da-40e2-8c24-3d63a860cd19\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:53:17 crc kubenswrapper[4822]: I1201 06:53:17.143898 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33a7bca7-b1da-40e2-8c24-3d63a860cd19-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"33a7bca7-b1da-40e2-8c24-3d63a860cd19\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:53:17 crc kubenswrapper[4822]: I1201 06:53:17.144017 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33a7bca7-b1da-40e2-8c24-3d63a860cd19-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"33a7bca7-b1da-40e2-8c24-3d63a860cd19\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:53:17 crc kubenswrapper[4822]: I1201 06:53:17.144231 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33a7bca7-b1da-40e2-8c24-3d63a860cd19-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"33a7bca7-b1da-40e2-8c24-3d63a860cd19\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:53:17 crc 
kubenswrapper[4822]: I1201 06:53:17.164167 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33a7bca7-b1da-40e2-8c24-3d63a860cd19-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"33a7bca7-b1da-40e2-8c24-3d63a860cd19\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:53:17 crc kubenswrapper[4822]: I1201 06:53:17.268179 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:53:17 crc kubenswrapper[4822]: I1201 06:53:17.767445 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:17 crc kubenswrapper[4822]: I1201 06:53:17.772859 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-t4mcw" Dec 01 06:53:17 crc kubenswrapper[4822]: I1201 06:53:17.806898 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 01 06:53:17 crc kubenswrapper[4822]: I1201 06:53:17.987326 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:17 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:17 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:17 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:17 crc kubenswrapper[4822]: I1201 06:53:17.987856 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:18 crc kubenswrapper[4822]: I1201 06:53:18.001751 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"33a7bca7-b1da-40e2-8c24-3d63a860cd19","Type":"ContainerStarted","Data":"61c6b9937a2e160c484354371dce4090cd295cbd524f23ba3d2d55e8dbeaeb3d"} Dec 01 06:53:18 crc kubenswrapper[4822]: I1201 06:53:18.659378 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-f87b4" Dec 01 06:53:18 crc kubenswrapper[4822]: I1201 06:53:18.984447 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:18 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:18 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:18 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:18 crc kubenswrapper[4822]: I1201 06:53:18.984543 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:19 crc kubenswrapper[4822]: I1201 06:53:19.037687 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" 
event={"ID":"33a7bca7-b1da-40e2-8c24-3d63a860cd19","Type":"ContainerStarted","Data":"cd7e4463413c516fc843eb0cac0f15204eda36b762789374117fbdba0a6c08a9"} Dec 01 06:53:19 crc kubenswrapper[4822]: I1201 06:53:19.985786 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:19 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:19 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:19 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:19 crc kubenswrapper[4822]: I1201 06:53:19.988344 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:20 crc kubenswrapper[4822]: I1201 06:53:20.072597 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=4.072576023 podStartE2EDuration="4.072576023s" podCreationTimestamp="2025-12-01 06:53:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:20.069708715 +0000 UTC m=+155.390516401" watchObservedRunningTime="2025-12-01 06:53:20.072576023 +0000 UTC m=+155.393383709" Dec 01 06:53:20 crc kubenswrapper[4822]: I1201 06:53:20.983329 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:20 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:20 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:20 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:20 crc kubenswrapper[4822]: I1201 06:53:20.983397 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:21 crc kubenswrapper[4822]: I1201 06:53:21.065916 4822 generic.go:334] "Generic (PLEG): container finished" podID="33a7bca7-b1da-40e2-8c24-3d63a860cd19" containerID="cd7e4463413c516fc843eb0cac0f15204eda36b762789374117fbdba0a6c08a9" exitCode=0 Dec 01 06:53:21 crc kubenswrapper[4822]: I1201 06:53:21.066026 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"33a7bca7-b1da-40e2-8c24-3d63a860cd19","Type":"ContainerDied","Data":"cd7e4463413c516fc843eb0cac0f15204eda36b762789374117fbdba0a6c08a9"} Dec 01 06:53:21 crc kubenswrapper[4822]: I1201 06:53:21.985374 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:21 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:21 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:21 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:21 crc kubenswrapper[4822]: I1201 06:53:21.985826 4822 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:22 crc kubenswrapper[4822]: I1201 06:53:22.991396 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:22 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:22 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:22 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:22 crc kubenswrapper[4822]: I1201 06:53:22.991764 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:23 crc kubenswrapper[4822]: I1201 06:53:23.903586 4822 patch_prober.go:28] interesting pod/console-f9d7485db-ptzkb container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Dec 01 06:53:23 crc kubenswrapper[4822]: I1201 06:53:23.903693 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-ptzkb" podUID="839d69e8-399b-4c30-b64f-893327a389e7" containerName="console" probeResult="failure" output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" Dec 01 06:53:23 crc kubenswrapper[4822]: I1201 06:53:23.919095 4822 patch_prober.go:28] interesting pod/downloads-7954f5f757-k9qc2 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Dec 01 06:53:23 crc kubenswrapper[4822]: I1201 06:53:23.919116 4822 patch_prober.go:28] interesting pod/downloads-7954f5f757-k9qc2 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Dec 01 06:53:23 crc kubenswrapper[4822]: I1201 06:53:23.919161 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-k9qc2" podUID="9170d425-7fae-40d9-aab0-7d2afbaa56e4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" Dec 01 06:53:23 crc kubenswrapper[4822]: I1201 06:53:23.919203 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-k9qc2" podUID="9170d425-7fae-40d9-aab0-7d2afbaa56e4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.31:8080/\": dial tcp 10.217.0.31:8080: connect: connection refused" Dec 01 06:53:23 crc kubenswrapper[4822]: I1201 06:53:23.983391 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:23 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 
01 06:53:23 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:23 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:23 crc kubenswrapper[4822]: I1201 06:53:23.983462 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:24 crc kubenswrapper[4822]: I1201 06:53:24.984303 4822 patch_prober.go:28] interesting pod/router-default-5444994796-nkhfk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:53:24 crc kubenswrapper[4822]: [-]has-synced failed: reason withheld Dec 01 06:53:24 crc kubenswrapper[4822]: [+]process-running ok Dec 01 06:53:24 crc kubenswrapper[4822]: healthz check failed Dec 01 06:53:24 crc kubenswrapper[4822]: I1201 06:53:24.985053 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkhfk" podUID="9fafefcc-1420-4d16-a98e-56e31a3864ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:53:25 crc kubenswrapper[4822]: I1201 06:53:25.987491 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:25 crc kubenswrapper[4822]: I1201 06:53:25.990538 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-nkhfk" Dec 01 06:53:27 crc kubenswrapper[4822]: I1201 06:53:27.737777 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:53:27 crc kubenswrapper[4822]: I1201 06:53:27.743705 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80225810-9d72-45b0-980f-1cb242d987e8-metrics-certs\") pod \"network-metrics-daemon-lk8mq\" (UID: \"80225810-9d72-45b0-980f-1cb242d987e8\") " pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:53:27 crc kubenswrapper[4822]: I1201 06:53:27.767872 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-lk8mq" Dec 01 06:53:31 crc kubenswrapper[4822]: I1201 06:53:31.967239 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:53:32 crc kubenswrapper[4822]: I1201 06:53:32.002053 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33a7bca7-b1da-40e2-8c24-3d63a860cd19-kube-api-access\") pod \"33a7bca7-b1da-40e2-8c24-3d63a860cd19\" (UID: \"33a7bca7-b1da-40e2-8c24-3d63a860cd19\") " Dec 01 06:53:32 crc kubenswrapper[4822]: I1201 06:53:32.002178 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33a7bca7-b1da-40e2-8c24-3d63a860cd19-kubelet-dir\") pod \"33a7bca7-b1da-40e2-8c24-3d63a860cd19\" (UID: \"33a7bca7-b1da-40e2-8c24-3d63a860cd19\") " Dec 01 06:53:32 crc kubenswrapper[4822]: I1201 06:53:32.002699 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/33a7bca7-b1da-40e2-8c24-3d63a860cd19-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "33a7bca7-b1da-40e2-8c24-3d63a860cd19" (UID: "33a7bca7-b1da-40e2-8c24-3d63a860cd19"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:53:32 crc kubenswrapper[4822]: I1201 06:53:32.004420 4822 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33a7bca7-b1da-40e2-8c24-3d63a860cd19-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 06:53:32 crc kubenswrapper[4822]: I1201 06:53:32.010279 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33a7bca7-b1da-40e2-8c24-3d63a860cd19-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "33a7bca7-b1da-40e2-8c24-3d63a860cd19" (UID: "33a7bca7-b1da-40e2-8c24-3d63a860cd19"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:53:32 crc kubenswrapper[4822]: I1201 06:53:32.106663 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33a7bca7-b1da-40e2-8c24-3d63a860cd19-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 06:53:32 crc kubenswrapper[4822]: I1201 06:53:32.147540 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"33a7bca7-b1da-40e2-8c24-3d63a860cd19","Type":"ContainerDied","Data":"61c6b9937a2e160c484354371dce4090cd295cbd524f23ba3d2d55e8dbeaeb3d"} Dec 01 06:53:32 crc kubenswrapper[4822]: I1201 06:53:32.147700 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61c6b9937a2e160c484354371dce4090cd295cbd524f23ba3d2d55e8dbeaeb3d" Dec 01 06:53:32 crc kubenswrapper[4822]: I1201 06:53:32.147700 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:53:32 crc kubenswrapper[4822]: I1201 06:53:32.480122 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:53:33 crc kubenswrapper[4822]: I1201 06:53:33.902574 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:33 crc kubenswrapper[4822]: I1201 06:53:33.907286 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 06:53:33 crc kubenswrapper[4822]: I1201 06:53:33.929351 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-k9qc2" Dec 01 06:53:42 crc kubenswrapper[4822]: I1201 06:53:42.543892 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:53:42 crc kubenswrapper[4822]: I1201 06:53:42.544626 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 06:53:43 crc kubenswrapper[4822]: E1201 06:53:43.047776 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 01 06:53:43 crc kubenswrapper[4822]: E1201 06:53:43.048292 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4h6cn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed 
in pod redhat-operators-smgxk_openshift-marketplace(4eaba10b-a8b4-4079-a55c-0807ca25b2e4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 06:53:43 crc kubenswrapper[4822]: E1201 06:53:43.049509 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-smgxk" podUID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" Dec 01 06:53:43 crc kubenswrapper[4822]: I1201 06:53:43.724414 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw8s9" Dec 01 06:53:44 crc kubenswrapper[4822]: E1201 06:53:44.021616 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-smgxk" podUID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" Dec 01 06:53:44 crc kubenswrapper[4822]: E1201 06:53:44.076792 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 01 06:53:44 crc kubenswrapper[4822]: E1201 06:53:44.076934 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mdbcr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-ghqm7_openshift-marketplace(d0d1fe31-3d9d-4422-9b5b-d73653be78c8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 06:53:44 crc kubenswrapper[4822]: E1201 06:53:44.078112 4822 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-ghqm7" podUID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" Dec 01 06:53:45 crc kubenswrapper[4822]: E1201 06:53:45.481536 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ghqm7" podUID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" Dec 01 06:53:45 crc kubenswrapper[4822]: E1201 06:53:45.579223 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 01 06:53:45 crc kubenswrapper[4822]: E1201 06:53:45.579353 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pdrs2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-tv5ck_openshift-marketplace(d9639beb-e6eb-42eb-9937-1b89a6be6100): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 06:53:45 crc kubenswrapper[4822]: E1201 06:53:45.581726 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-tv5ck" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" Dec 01 06:53:45 crc kubenswrapper[4822]: E1201 06:53:45.605384 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: 
copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 01 06:53:45 crc kubenswrapper[4822]: E1201 06:53:45.605700 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rf4s4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-2tczn_openshift-marketplace(05a1348c-7ab2-414b-a0ab-2e48808a2d4d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 06:53:45 crc kubenswrapper[4822]: E1201 06:53:45.606948 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-2tczn" podUID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" Dec 01 06:53:45 crc kubenswrapper[4822]: E1201 06:53:45.619563 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 01 06:53:45 crc kubenswrapper[4822]: E1201 06:53:45.619767 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bvwh2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-kwtsp_openshift-marketplace(a693d2fd-8db7-4699-ad03-e70175ead53c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 06:53:45 crc kubenswrapper[4822]: E1201 06:53:45.621796 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-kwtsp" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" Dec 01 06:53:45 crc kubenswrapper[4822]: I1201 06:53:45.962279 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-lk8mq"] Dec 01 06:53:45 crc kubenswrapper[4822]: W1201 06:53:45.966797 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80225810_9d72_45b0_980f_1cb242d987e8.slice/crio-f6bc347aa6e88c163957409f642d8e28b0a1b2edfa525d9e9c426f940de272f2 WatchSource:0}: Error finding container f6bc347aa6e88c163957409f642d8e28b0a1b2edfa525d9e9c426f940de272f2: Status 404 returned error can't find the container with id f6bc347aa6e88c163957409f642d8e28b0a1b2edfa525d9e9c426f940de272f2 Dec 01 06:53:46 crc kubenswrapper[4822]: I1201 06:53:46.236876 4822 generic.go:334] "Generic (PLEG): container finished" podID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" containerID="75ad0e68777a771a7b159e4c7213816ae513ea74cfe5392f9ef94e3d615be138" exitCode=0 Dec 01 06:53:46 crc kubenswrapper[4822]: I1201 06:53:46.237505 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jrmvv" event={"ID":"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd","Type":"ContainerDied","Data":"75ad0e68777a771a7b159e4c7213816ae513ea74cfe5392f9ef94e3d615be138"} Dec 01 06:53:46 crc kubenswrapper[4822]: I1201 06:53:46.241898 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmgc" 
event={"ID":"c43bed42-ab14-48f1-a79d-dca61de1b6f1","Type":"ContainerStarted","Data":"112cb79570516f314e5c168a479445db1a85cf86cf589435b525f2d9c6c1f864"} Dec 01 06:53:46 crc kubenswrapper[4822]: I1201 06:53:46.244305 4822 generic.go:334] "Generic (PLEG): container finished" podID="dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" containerID="2d0d6b1cb2c9e3dfd3ecaa0299d628c28600f277a914cb45e3769052d5512d0d" exitCode=0 Dec 01 06:53:46 crc kubenswrapper[4822]: I1201 06:53:46.244421 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5gkcw" event={"ID":"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9","Type":"ContainerDied","Data":"2d0d6b1cb2c9e3dfd3ecaa0299d628c28600f277a914cb45e3769052d5512d0d"} Dec 01 06:53:46 crc kubenswrapper[4822]: I1201 06:53:46.254220 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" event={"ID":"80225810-9d72-45b0-980f-1cb242d987e8","Type":"ContainerStarted","Data":"db4f89a3cee489e9910d6351ae297a9537f9a78c657df908d9920e65ff2eb5b8"} Dec 01 06:53:46 crc kubenswrapper[4822]: I1201 06:53:46.254315 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" event={"ID":"80225810-9d72-45b0-980f-1cb242d987e8","Type":"ContainerStarted","Data":"f6bc347aa6e88c163957409f642d8e28b0a1b2edfa525d9e9c426f940de272f2"} Dec 01 06:53:46 crc kubenswrapper[4822]: E1201 06:53:46.257832 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-kwtsp" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" Dec 01 06:53:46 crc kubenswrapper[4822]: E1201 06:53:46.258150 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-tv5ck" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" Dec 01 06:53:46 crc kubenswrapper[4822]: E1201 06:53:46.258237 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-2tczn" podUID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" Dec 01 06:53:47 crc kubenswrapper[4822]: I1201 06:53:47.264437 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5gkcw" event={"ID":"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9","Type":"ContainerStarted","Data":"f93e25c716741bed2a90c4c87c1254c43c9792eb39b413bf31bd2b145d71cc81"} Dec 01 06:53:47 crc kubenswrapper[4822]: I1201 06:53:47.271119 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-lk8mq" event={"ID":"80225810-9d72-45b0-980f-1cb242d987e8","Type":"ContainerStarted","Data":"95763b5b70e0e8cd6b60c7f3eb7f2ed8cc896a4668ee0bcd8d0409101759c8b7"} Dec 01 06:53:47 crc kubenswrapper[4822]: I1201 06:53:47.274793 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jrmvv" event={"ID":"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd","Type":"ContainerStarted","Data":"df6b8210f80f3c995a39421f97443e6b7fc4716fb346ffee5aa6af3601c32e73"} Dec 01 06:53:47 crc 
kubenswrapper[4822]: I1201 06:53:47.277167 4822 generic.go:334] "Generic (PLEG): container finished" podID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" containerID="112cb79570516f314e5c168a479445db1a85cf86cf589435b525f2d9c6c1f864" exitCode=0 Dec 01 06:53:47 crc kubenswrapper[4822]: I1201 06:53:47.277274 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmgc" event={"ID":"c43bed42-ab14-48f1-a79d-dca61de1b6f1","Type":"ContainerDied","Data":"112cb79570516f314e5c168a479445db1a85cf86cf589435b525f2d9c6c1f864"} Dec 01 06:53:47 crc kubenswrapper[4822]: I1201 06:53:47.294899 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5gkcw" podStartSLOduration=4.413603231 podStartE2EDuration="38.294878337s" podCreationTimestamp="2025-12-01 06:53:09 +0000 UTC" firstStartedPulling="2025-12-01 06:53:12.783984539 +0000 UTC m=+148.104792225" lastFinishedPulling="2025-12-01 06:53:46.665259645 +0000 UTC m=+181.986067331" observedRunningTime="2025-12-01 06:53:47.292900266 +0000 UTC m=+182.613707952" watchObservedRunningTime="2025-12-01 06:53:47.294878337 +0000 UTC m=+182.615686023" Dec 01 06:53:47 crc kubenswrapper[4822]: I1201 06:53:47.330536 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-lk8mq" podStartSLOduration=162.330517046 podStartE2EDuration="2m42.330517046s" podCreationTimestamp="2025-12-01 06:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:47.328256817 +0000 UTC m=+182.649064503" watchObservedRunningTime="2025-12-01 06:53:47.330517046 +0000 UTC m=+182.651324732" Dec 01 06:53:47 crc kubenswrapper[4822]: I1201 06:53:47.356994 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jrmvv" podStartSLOduration=4.222275123 podStartE2EDuration="38.356971024s" podCreationTimestamp="2025-12-01 06:53:09 +0000 UTC" firstStartedPulling="2025-12-01 06:53:12.578611112 +0000 UTC m=+147.899418798" lastFinishedPulling="2025-12-01 06:53:46.713307013 +0000 UTC m=+182.034114699" observedRunningTime="2025-12-01 06:53:47.348689051 +0000 UTC m=+182.669496737" watchObservedRunningTime="2025-12-01 06:53:47.356971024 +0000 UTC m=+182.677778710" Dec 01 06:53:47 crc kubenswrapper[4822]: I1201 06:53:47.422212 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-jq95c"] Dec 01 06:53:48 crc kubenswrapper[4822]: I1201 06:53:48.307771 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmgc" event={"ID":"c43bed42-ab14-48f1-a79d-dca61de1b6f1","Type":"ContainerStarted","Data":"54d15f111dca7ffbab3597e812bb17c250186ec9ea700cc8aaa3bfa8d317dac6"} Dec 01 06:53:48 crc kubenswrapper[4822]: I1201 06:53:48.332929 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nbmgc" podStartSLOduration=2.487216254 podStartE2EDuration="36.332905571s" podCreationTimestamp="2025-12-01 06:53:12 +0000 UTC" firstStartedPulling="2025-12-01 06:53:13.861964865 +0000 UTC m=+149.182772551" lastFinishedPulling="2025-12-01 06:53:47.707654182 +0000 UTC m=+183.028461868" observedRunningTime="2025-12-01 06:53:48.328894199 +0000 UTC m=+183.649701885" watchObservedRunningTime="2025-12-01 06:53:48.332905571 +0000 UTC m=+183.653713257" Dec 01 06:53:49 crc 
kubenswrapper[4822]: I1201 06:53:49.841062 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.841205 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.850186 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.850254 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.898810 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.904392 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.948534 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 06:53:49 crc kubenswrapper[4822]: E1201 06:53:49.948874 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33a7bca7-b1da-40e2-8c24-3d63a860cd19" containerName="pruner" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.948891 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="33a7bca7-b1da-40e2-8c24-3d63a860cd19" containerName="pruner" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.949018 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="33a7bca7-b1da-40e2-8c24-3d63a860cd19" containerName="pruner" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.949506 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.953058 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.953177 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.977204 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.986904 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1481c01c-cd36-4cc6-98d7-5be929382322-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1481c01c-cd36-4cc6-98d7-5be929382322\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:53:49 crc kubenswrapper[4822]: I1201 06:53:49.986982 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1481c01c-cd36-4cc6-98d7-5be929382322-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1481c01c-cd36-4cc6-98d7-5be929382322\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:53:50 crc kubenswrapper[4822]: I1201 06:53:50.088765 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1481c01c-cd36-4cc6-98d7-5be929382322-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1481c01c-cd36-4cc6-98d7-5be929382322\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:53:50 crc kubenswrapper[4822]: I1201 06:53:50.088905 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1481c01c-cd36-4cc6-98d7-5be929382322-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1481c01c-cd36-4cc6-98d7-5be929382322\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:53:50 crc kubenswrapper[4822]: I1201 06:53:50.088924 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1481c01c-cd36-4cc6-98d7-5be929382322-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1481c01c-cd36-4cc6-98d7-5be929382322\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:53:50 crc kubenswrapper[4822]: I1201 06:53:50.114300 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1481c01c-cd36-4cc6-98d7-5be929382322-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1481c01c-cd36-4cc6-98d7-5be929382322\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:53:50 crc kubenswrapper[4822]: I1201 06:53:50.271619 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:53:50 crc kubenswrapper[4822]: I1201 06:53:50.711377 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 06:53:51 crc kubenswrapper[4822]: I1201 06:53:51.327470 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1481c01c-cd36-4cc6-98d7-5be929382322","Type":"ContainerStarted","Data":"6240e9309a6a87bfc49156c6373718910cdc36893177747d0421ee59d72ad5e9"} Dec 01 06:53:51 crc kubenswrapper[4822]: I1201 06:53:51.327530 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1481c01c-cd36-4cc6-98d7-5be929382322","Type":"ContainerStarted","Data":"b051e2e821cb005f4093d7998cadb372a3e277085133e8e903618ef2a8bed5ff"} Dec 01 06:53:51 crc kubenswrapper[4822]: I1201 06:53:51.343340 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.3433149650000002 podStartE2EDuration="2.343314965s" podCreationTimestamp="2025-12-01 06:53:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:51.340717336 +0000 UTC m=+186.661525022" watchObservedRunningTime="2025-12-01 06:53:51.343314965 +0000 UTC m=+186.664122641" Dec 01 06:53:52 crc kubenswrapper[4822]: I1201 06:53:52.192257 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:53:52 crc kubenswrapper[4822]: I1201 06:53:52.341821 4822 generic.go:334] "Generic (PLEG): container finished" podID="1481c01c-cd36-4cc6-98d7-5be929382322" containerID="6240e9309a6a87bfc49156c6373718910cdc36893177747d0421ee59d72ad5e9" exitCode=0 Dec 01 06:53:52 crc kubenswrapper[4822]: I1201 06:53:52.341874 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1481c01c-cd36-4cc6-98d7-5be929382322","Type":"ContainerDied","Data":"6240e9309a6a87bfc49156c6373718910cdc36893177747d0421ee59d72ad5e9"} Dec 01 06:53:52 crc kubenswrapper[4822]: I1201 06:53:52.685211 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nbmgc" Dec 01 06:53:52 crc kubenswrapper[4822]: I1201 06:53:52.685281 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nbmgc" Dec 01 06:53:53 crc kubenswrapper[4822]: I1201 06:53:53.631726 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:53:53 crc kubenswrapper[4822]: I1201 06:53:53.736507 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nbmgc" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" containerName="registry-server" probeResult="failure" output=< Dec 01 06:53:53 crc kubenswrapper[4822]: timeout: failed to connect service ":50051" within 1s Dec 01 06:53:53 crc kubenswrapper[4822]: > Dec 01 06:53:53 crc kubenswrapper[4822]: I1201 06:53:53.758306 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1481c01c-cd36-4cc6-98d7-5be929382322-kubelet-dir\") pod \"1481c01c-cd36-4cc6-98d7-5be929382322\" (UID: \"1481c01c-cd36-4cc6-98d7-5be929382322\") " Dec 01 06:53:53 crc kubenswrapper[4822]: I1201 06:53:53.758431 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1481c01c-cd36-4cc6-98d7-5be929382322-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1481c01c-cd36-4cc6-98d7-5be929382322" (UID: "1481c01c-cd36-4cc6-98d7-5be929382322"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:53:53 crc kubenswrapper[4822]: I1201 06:53:53.758588 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1481c01c-cd36-4cc6-98d7-5be929382322-kube-api-access\") pod \"1481c01c-cd36-4cc6-98d7-5be929382322\" (UID: \"1481c01c-cd36-4cc6-98d7-5be929382322\") " Dec 01 06:53:53 crc kubenswrapper[4822]: I1201 06:53:53.758820 4822 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1481c01c-cd36-4cc6-98d7-5be929382322-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 06:53:53 crc kubenswrapper[4822]: I1201 06:53:53.768757 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1481c01c-cd36-4cc6-98d7-5be929382322-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1481c01c-cd36-4cc6-98d7-5be929382322" (UID: "1481c01c-cd36-4cc6-98d7-5be929382322"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:53:53 crc kubenswrapper[4822]: I1201 06:53:53.860836 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1481c01c-cd36-4cc6-98d7-5be929382322-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 06:53:54 crc kubenswrapper[4822]: I1201 06:53:54.354974 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1481c01c-cd36-4cc6-98d7-5be929382322","Type":"ContainerDied","Data":"b051e2e821cb005f4093d7998cadb372a3e277085133e8e903618ef2a8bed5ff"} Dec 01 06:53:54 crc kubenswrapper[4822]: I1201 06:53:54.355029 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:53:54 crc kubenswrapper[4822]: I1201 06:53:54.355037 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b051e2e821cb005f4093d7998cadb372a3e277085133e8e903618ef2a8bed5ff" Dec 01 06:53:56 crc kubenswrapper[4822]: I1201 06:53:56.371015 4822 generic.go:334] "Generic (PLEG): container finished" podID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" containerID="d8bd3574f1d04895d21d146b9bb82d32b1afd0af3aa41931f2ba5c1fd419bfd5" exitCode=0 Dec 01 06:53:56 crc kubenswrapper[4822]: I1201 06:53:56.371073 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smgxk" event={"ID":"4eaba10b-a8b4-4079-a55c-0807ca25b2e4","Type":"ContainerDied","Data":"d8bd3574f1d04895d21d146b9bb82d32b1afd0af3aa41931f2ba5c1fd419bfd5"} Dec 01 06:53:57 crc kubenswrapper[4822]: I1201 06:53:57.381496 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smgxk" event={"ID":"4eaba10b-a8b4-4079-a55c-0807ca25b2e4","Type":"ContainerStarted","Data":"85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3"} Dec 01 06:53:57 crc kubenswrapper[4822]: I1201 06:53:57.410214 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-smgxk" podStartSLOduration=3.5177239670000002 podStartE2EDuration="45.410183052s" podCreationTimestamp="2025-12-01 06:53:12 +0000 UTC" firstStartedPulling="2025-12-01 06:53:14.872694454 +0000 UTC m=+150.193502140" lastFinishedPulling="2025-12-01 06:53:56.765153549 +0000 UTC m=+192.085961225" observedRunningTime="2025-12-01 06:53:57.405776628 +0000 UTC m=+192.726584324" watchObservedRunningTime="2025-12-01 06:53:57.410183052 +0000 UTC m=+192.730990738" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.331946 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 06:53:58 crc kubenswrapper[4822]: E1201 06:53:58.332199 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1481c01c-cd36-4cc6-98d7-5be929382322" containerName="pruner" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.332218 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="1481c01c-cd36-4cc6-98d7-5be929382322" containerName="pruner" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.332359 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="1481c01c-cd36-4cc6-98d7-5be929382322" containerName="pruner" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.332819 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.335637 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.337737 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.347331 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.438575 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3fec124f-3cc9-4991-85b7-3a73de687907-kubelet-dir\") pod \"installer-9-crc\" (UID: \"3fec124f-3cc9-4991-85b7-3a73de687907\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.439246 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fec124f-3cc9-4991-85b7-3a73de687907-kube-api-access\") pod \"installer-9-crc\" (UID: \"3fec124f-3cc9-4991-85b7-3a73de687907\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.439401 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/3fec124f-3cc9-4991-85b7-3a73de687907-var-lock\") pod \"installer-9-crc\" (UID: \"3fec124f-3cc9-4991-85b7-3a73de687907\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.540583 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3fec124f-3cc9-4991-85b7-3a73de687907-kubelet-dir\") pod \"installer-9-crc\" (UID: \"3fec124f-3cc9-4991-85b7-3a73de687907\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.540629 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fec124f-3cc9-4991-85b7-3a73de687907-kube-api-access\") pod \"installer-9-crc\" (UID: \"3fec124f-3cc9-4991-85b7-3a73de687907\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.540661 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/3fec124f-3cc9-4991-85b7-3a73de687907-var-lock\") pod \"installer-9-crc\" (UID: \"3fec124f-3cc9-4991-85b7-3a73de687907\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.540784 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/3fec124f-3cc9-4991-85b7-3a73de687907-var-lock\") pod \"installer-9-crc\" (UID: \"3fec124f-3cc9-4991-85b7-3a73de687907\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.540820 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3fec124f-3cc9-4991-85b7-3a73de687907-kubelet-dir\") pod \"installer-9-crc\" (UID: 
\"3fec124f-3cc9-4991-85b7-3a73de687907\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.575026 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fec124f-3cc9-4991-85b7-3a73de687907-kube-api-access\") pod \"installer-9-crc\" (UID: \"3fec124f-3cc9-4991-85b7-3a73de687907\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:53:58 crc kubenswrapper[4822]: I1201 06:53:58.666118 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:53:59 crc kubenswrapper[4822]: I1201 06:53:59.077851 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 06:53:59 crc kubenswrapper[4822]: W1201 06:53:59.353703 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod3fec124f_3cc9_4991_85b7_3a73de687907.slice/crio-22cdb9238c980e04f11c15f355ff99840ca6bd8d36ba823896fffef90a9341fd WatchSource:0}: Error finding container 22cdb9238c980e04f11c15f355ff99840ca6bd8d36ba823896fffef90a9341fd: Status 404 returned error can't find the container with id 22cdb9238c980e04f11c15f355ff99840ca6bd8d36ba823896fffef90a9341fd Dec 01 06:53:59 crc kubenswrapper[4822]: I1201 06:53:59.395200 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"3fec124f-3cc9-4991-85b7-3a73de687907","Type":"ContainerStarted","Data":"22cdb9238c980e04f11c15f355ff99840ca6bd8d36ba823896fffef90a9341fd"} Dec 01 06:53:59 crc kubenswrapper[4822]: I1201 06:53:59.888194 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:53:59 crc kubenswrapper[4822]: I1201 06:53:59.901625 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:54:00 crc kubenswrapper[4822]: I1201 06:54:00.403076 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tv5ck" event={"ID":"d9639beb-e6eb-42eb-9937-1b89a6be6100","Type":"ContainerDied","Data":"c18c50dda977ee3b552dc70c2a4cf04172d92bb7670ccda273216a1682b8515d"} Dec 01 06:54:00 crc kubenswrapper[4822]: I1201 06:54:00.403027 4822 generic.go:334] "Generic (PLEG): container finished" podID="d9639beb-e6eb-42eb-9937-1b89a6be6100" containerID="c18c50dda977ee3b552dc70c2a4cf04172d92bb7670ccda273216a1682b8515d" exitCode=0 Dec 01 06:54:00 crc kubenswrapper[4822]: I1201 06:54:00.414908 4822 generic.go:334] "Generic (PLEG): container finished" podID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" containerID="269b55bcc3195d64d861281f320247ae0cf4295405bd1681738a6e79d7359e7d" exitCode=0 Dec 01 06:54:00 crc kubenswrapper[4822]: I1201 06:54:00.414980 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ghqm7" event={"ID":"d0d1fe31-3d9d-4422-9b5b-d73653be78c8","Type":"ContainerDied","Data":"269b55bcc3195d64d861281f320247ae0cf4295405bd1681738a6e79d7359e7d"} Dec 01 06:54:00 crc kubenswrapper[4822]: I1201 06:54:00.425059 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"3fec124f-3cc9-4991-85b7-3a73de687907","Type":"ContainerStarted","Data":"989252238d905c784261fe06430262674d7606cf6fed841e2361925fa7ff693d"} Dec 01 06:54:00 crc kubenswrapper[4822]: I1201 06:54:00.466991 4822 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.466954794 podStartE2EDuration="2.466954794s" podCreationTimestamp="2025-12-01 06:53:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:54:00.463659673 +0000 UTC m=+195.784467359" watchObservedRunningTime="2025-12-01 06:54:00.466954794 +0000 UTC m=+195.787762480" Dec 01 06:54:01 crc kubenswrapper[4822]: I1201 06:54:01.220175 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5gkcw"] Dec 01 06:54:01 crc kubenswrapper[4822]: I1201 06:54:01.221028 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5gkcw" podUID="dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" containerName="registry-server" containerID="cri-o://f93e25c716741bed2a90c4c87c1254c43c9792eb39b413bf31bd2b145d71cc81" gracePeriod=2 Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.451216 4822 generic.go:334] "Generic (PLEG): container finished" podID="dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" containerID="f93e25c716741bed2a90c4c87c1254c43c9792eb39b413bf31bd2b145d71cc81" exitCode=0 Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.451315 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5gkcw" event={"ID":"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9","Type":"ContainerDied","Data":"f93e25c716741bed2a90c4c87c1254c43c9792eb39b413bf31bd2b145d71cc81"} Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.453960 4822 generic.go:334] "Generic (PLEG): container finished" podID="a693d2fd-8db7-4699-ad03-e70175ead53c" containerID="ccb134177235cabde5b6218ff59f511a7980092cc052143ac0c0126ec0ea8a0e" exitCode=0 Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.453983 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kwtsp" event={"ID":"a693d2fd-8db7-4699-ad03-e70175ead53c","Type":"ContainerDied","Data":"ccb134177235cabde5b6218ff59f511a7980092cc052143ac0c0126ec0ea8a0e"} Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.713849 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.737771 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nbmgc" Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.776377 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nbmgc" Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.820851 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-catalog-content\") pod \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\" (UID: \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\") " Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.820969 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhpqf\" (UniqueName: \"kubernetes.io/projected/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-kube-api-access-fhpqf\") pod \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\" (UID: \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\") " Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.821035 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-utilities\") pod \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\" (UID: \"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9\") " Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.831703 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-utilities" (OuterVolumeSpecName: "utilities") pod "dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" (UID: "dbbcf3ab-f810-4bb6-b401-f9365b66b6e9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.833468 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-kube-api-access-fhpqf" (OuterVolumeSpecName: "kube-api-access-fhpqf") pod "dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" (UID: "dbbcf3ab-f810-4bb6-b401-f9365b66b6e9"). InnerVolumeSpecName "kube-api-access-fhpqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.900759 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" (UID: "dbbcf3ab-f810-4bb6-b401-f9365b66b6e9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.923036 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.923065 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhpqf\" (UniqueName: \"kubernetes.io/projected/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-kube-api-access-fhpqf\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:02 crc kubenswrapper[4822]: I1201 06:54:02.923080 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:03 crc kubenswrapper[4822]: I1201 06:54:03.082520 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:54:03 crc kubenswrapper[4822]: I1201 06:54:03.082583 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:54:03 crc kubenswrapper[4822]: I1201 06:54:03.192406 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:54:03 crc kubenswrapper[4822]: I1201 06:54:03.464516 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5gkcw" event={"ID":"dbbcf3ab-f810-4bb6-b401-f9365b66b6e9","Type":"ContainerDied","Data":"434d2f0fd4bdf21652187405bb68b668ffac02c703bc03b9a8cd50ddfa14fcfc"} Dec 01 06:54:03 crc kubenswrapper[4822]: I1201 06:54:03.464656 4822 scope.go:117] "RemoveContainer" containerID="f93e25c716741bed2a90c4c87c1254c43c9792eb39b413bf31bd2b145d71cc81" Dec 01 06:54:03 crc kubenswrapper[4822]: I1201 06:54:03.464744 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5gkcw" Dec 01 06:54:03 crc kubenswrapper[4822]: I1201 06:54:03.495930 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5gkcw"] Dec 01 06:54:03 crc kubenswrapper[4822]: I1201 06:54:03.502065 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5gkcw"] Dec 01 06:54:03 crc kubenswrapper[4822]: I1201 06:54:03.515380 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:54:04 crc kubenswrapper[4822]: I1201 06:54:04.344652 4822 scope.go:117] "RemoveContainer" containerID="2d0d6b1cb2c9e3dfd3ecaa0299d628c28600f277a914cb45e3769052d5512d0d" Dec 01 06:54:04 crc kubenswrapper[4822]: I1201 06:54:04.607368 4822 scope.go:117] "RemoveContainer" containerID="bd65cca94353e87e69ecc107010f89211facc1824e260582cf587b5aede9e7a0" Dec 01 06:54:04 crc kubenswrapper[4822]: I1201 06:54:04.961655 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" path="/var/lib/kubelet/pods/dbbcf3ab-f810-4bb6-b401-f9365b66b6e9/volumes" Dec 01 06:54:05 crc kubenswrapper[4822]: I1201 06:54:05.621627 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-smgxk"] Dec 01 06:54:05 crc kubenswrapper[4822]: I1201 06:54:05.622376 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-smgxk" podUID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" containerName="registry-server" containerID="cri-o://85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3" gracePeriod=2 Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.259245 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.381457 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-utilities\") pod \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\" (UID: \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\") " Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.381618 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-catalog-content\") pod \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\" (UID: \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\") " Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.381696 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h6cn\" (UniqueName: \"kubernetes.io/projected/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-kube-api-access-4h6cn\") pod \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\" (UID: \"4eaba10b-a8b4-4079-a55c-0807ca25b2e4\") " Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.382893 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-utilities" (OuterVolumeSpecName: "utilities") pod "4eaba10b-a8b4-4079-a55c-0807ca25b2e4" (UID: "4eaba10b-a8b4-4079-a55c-0807ca25b2e4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.390572 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-kube-api-access-4h6cn" (OuterVolumeSpecName: "kube-api-access-4h6cn") pod "4eaba10b-a8b4-4079-a55c-0807ca25b2e4" (UID: "4eaba10b-a8b4-4079-a55c-0807ca25b2e4"). InnerVolumeSpecName "kube-api-access-4h6cn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.482782 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.482818 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h6cn\" (UniqueName: \"kubernetes.io/projected/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-kube-api-access-4h6cn\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.487426 4822 generic.go:334] "Generic (PLEG): container finished" podID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" containerID="85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3" exitCode=0 Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.487473 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smgxk" event={"ID":"4eaba10b-a8b4-4079-a55c-0807ca25b2e4","Type":"ContainerDied","Data":"85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3"} Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.487521 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smgxk" event={"ID":"4eaba10b-a8b4-4079-a55c-0807ca25b2e4","Type":"ContainerDied","Data":"e05e3cb2cbe27804f9145962a729541c76c2e65da4b7a90577c3b3f24c3a9b58"} Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.487539 4822 scope.go:117] "RemoveContainer" containerID="85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.487622 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-smgxk" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.490085 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tv5ck" event={"ID":"d9639beb-e6eb-42eb-9937-1b89a6be6100","Type":"ContainerStarted","Data":"4a1f800c3a1ccfd1c33813cac10bc6658ec0b3fdf8ca131d794acbdb4f8381cb"} Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.501294 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ghqm7" event={"ID":"d0d1fe31-3d9d-4422-9b5b-d73653be78c8","Type":"ContainerStarted","Data":"498a1484ed2761a07c9680387478e95ba14e79d7b176a86df5bcfca9b87f0427"} Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.504056 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4eaba10b-a8b4-4079-a55c-0807ca25b2e4" (UID: "4eaba10b-a8b4-4079-a55c-0807ca25b2e4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.509588 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kwtsp" event={"ID":"a693d2fd-8db7-4699-ad03-e70175ead53c","Type":"ContainerStarted","Data":"ee1916eaa5c200d71e32b7a4cfc966a3a97b6ca6a9ae2c9c070ba1176f4cd358"} Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.516521 4822 scope.go:117] "RemoveContainer" containerID="d8bd3574f1d04895d21d146b9bb82d32b1afd0af3aa41931f2ba5c1fd419bfd5" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.520098 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tv5ck" podStartSLOduration=4.438262615 podStartE2EDuration="56.520077101s" podCreationTimestamp="2025-12-01 06:53:10 +0000 UTC" firstStartedPulling="2025-12-01 06:53:13.841300103 +0000 UTC m=+149.162107789" lastFinishedPulling="2025-12-01 06:54:05.923114589 +0000 UTC m=+201.243922275" observedRunningTime="2025-12-01 06:54:06.510714262 +0000 UTC m=+201.831521948" watchObservedRunningTime="2025-12-01 06:54:06.520077101 +0000 UTC m=+201.840884787" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.522512 4822 generic.go:334] "Generic (PLEG): container finished" podID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" containerID="59d93be216e996edd5b903659a12c3f13e17f6ef4ef9a298caf196cc27c8ab1b" exitCode=0 Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.522563 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tczn" event={"ID":"05a1348c-7ab2-414b-a0ab-2e48808a2d4d","Type":"ContainerDied","Data":"59d93be216e996edd5b903659a12c3f13e17f6ef4ef9a298caf196cc27c8ab1b"} Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.533059 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ghqm7" podStartSLOduration=3.7721676950000003 podStartE2EDuration="55.53303114s" podCreationTimestamp="2025-12-01 06:53:11 +0000 UTC" firstStartedPulling="2025-12-01 06:53:13.812634687 +0000 UTC m=+149.133442373" lastFinishedPulling="2025-12-01 06:54:05.573498122 +0000 UTC m=+200.894305818" observedRunningTime="2025-12-01 06:54:06.530234409 +0000 UTC m=+201.851042095" watchObservedRunningTime="2025-12-01 06:54:06.53303114 +0000 UTC m=+201.853838816" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.539211 4822 scope.go:117] "RemoveContainer" containerID="d0cc710d4f49c7bc4b33be110c854d6067b258d7cdf4004d6f813c8e7cee3068" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.557010 4822 scope.go:117] "RemoveContainer" containerID="85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3" Dec 01 06:54:06 crc kubenswrapper[4822]: E1201 06:54:06.558149 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3\": container with ID starting with 85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3 not found: ID does not exist" containerID="85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.558258 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3"} err="failed to get container status 
\"85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3\": rpc error: code = NotFound desc = could not find container \"85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3\": container with ID starting with 85e5df3a2dcf8a21876aaca78395bfa945f151d05daa9d0091cfb0178ff37aa3 not found: ID does not exist" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.558354 4822 scope.go:117] "RemoveContainer" containerID="d8bd3574f1d04895d21d146b9bb82d32b1afd0af3aa41931f2ba5c1fd419bfd5" Dec 01 06:54:06 crc kubenswrapper[4822]: E1201 06:54:06.558741 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8bd3574f1d04895d21d146b9bb82d32b1afd0af3aa41931f2ba5c1fd419bfd5\": container with ID starting with d8bd3574f1d04895d21d146b9bb82d32b1afd0af3aa41931f2ba5c1fd419bfd5 not found: ID does not exist" containerID="d8bd3574f1d04895d21d146b9bb82d32b1afd0af3aa41931f2ba5c1fd419bfd5" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.558780 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8bd3574f1d04895d21d146b9bb82d32b1afd0af3aa41931f2ba5c1fd419bfd5"} err="failed to get container status \"d8bd3574f1d04895d21d146b9bb82d32b1afd0af3aa41931f2ba5c1fd419bfd5\": rpc error: code = NotFound desc = could not find container \"d8bd3574f1d04895d21d146b9bb82d32b1afd0af3aa41931f2ba5c1fd419bfd5\": container with ID starting with d8bd3574f1d04895d21d146b9bb82d32b1afd0af3aa41931f2ba5c1fd419bfd5 not found: ID does not exist" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.558809 4822 scope.go:117] "RemoveContainer" containerID="d0cc710d4f49c7bc4b33be110c854d6067b258d7cdf4004d6f813c8e7cee3068" Dec 01 06:54:06 crc kubenswrapper[4822]: E1201 06:54:06.559015 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0cc710d4f49c7bc4b33be110c854d6067b258d7cdf4004d6f813c8e7cee3068\": container with ID starting with d0cc710d4f49c7bc4b33be110c854d6067b258d7cdf4004d6f813c8e7cee3068 not found: ID does not exist" containerID="d0cc710d4f49c7bc4b33be110c854d6067b258d7cdf4004d6f813c8e7cee3068" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.559054 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0cc710d4f49c7bc4b33be110c854d6067b258d7cdf4004d6f813c8e7cee3068"} err="failed to get container status \"d0cc710d4f49c7bc4b33be110c854d6067b258d7cdf4004d6f813c8e7cee3068\": rpc error: code = NotFound desc = could not find container \"d0cc710d4f49c7bc4b33be110c854d6067b258d7cdf4004d6f813c8e7cee3068\": container with ID starting with d0cc710d4f49c7bc4b33be110c854d6067b258d7cdf4004d6f813c8e7cee3068 not found: ID does not exist" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.560759 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kwtsp" podStartSLOduration=5.437035776 podStartE2EDuration="58.560747043s" podCreationTimestamp="2025-12-01 06:53:08 +0000 UTC" firstStartedPulling="2025-12-01 06:53:12.800751581 +0000 UTC m=+148.121559267" lastFinishedPulling="2025-12-01 06:54:05.924462818 +0000 UTC m=+201.245270534" observedRunningTime="2025-12-01 06:54:06.556730698 +0000 UTC m=+201.879717873" watchObservedRunningTime="2025-12-01 06:54:06.560747043 +0000 UTC m=+201.881554729" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.585458 4822 reconciler_common.go:293] "Volume detached for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eaba10b-a8b4-4079-a55c-0807ca25b2e4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.819443 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-smgxk"] Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.823302 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-smgxk"] Dec 01 06:54:06 crc kubenswrapper[4822]: I1201 06:54:06.958896 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" path="/var/lib/kubelet/pods/4eaba10b-a8b4-4079-a55c-0807ca25b2e4/volumes" Dec 01 06:54:07 crc kubenswrapper[4822]: I1201 06:54:07.531766 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tczn" event={"ID":"05a1348c-7ab2-414b-a0ab-2e48808a2d4d","Type":"ContainerStarted","Data":"44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573"} Dec 01 06:54:07 crc kubenswrapper[4822]: I1201 06:54:07.550088 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2tczn" podStartSLOduration=4.064195326 podStartE2EDuration="58.550065229s" podCreationTimestamp="2025-12-01 06:53:09 +0000 UTC" firstStartedPulling="2025-12-01 06:53:12.680581509 +0000 UTC m=+148.001389195" lastFinishedPulling="2025-12-01 06:54:07.166451412 +0000 UTC m=+202.487259098" observedRunningTime="2025-12-01 06:54:07.547918342 +0000 UTC m=+202.868726038" watchObservedRunningTime="2025-12-01 06:54:07.550065229 +0000 UTC m=+202.870872915" Dec 01 06:54:09 crc kubenswrapper[4822]: I1201 06:54:09.619440 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:54:09 crc kubenswrapper[4822]: I1201 06:54:09.619520 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:54:09 crc kubenswrapper[4822]: I1201 06:54:09.674909 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:54:09 crc kubenswrapper[4822]: I1201 06:54:09.984167 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:54:09 crc kubenswrapper[4822]: I1201 06:54:09.984717 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:54:10 crc kubenswrapper[4822]: I1201 06:54:10.025240 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:54:11 crc kubenswrapper[4822]: I1201 06:54:11.244940 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tv5ck" Dec 01 06:54:11 crc kubenswrapper[4822]: I1201 06:54:11.245284 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tv5ck" Dec 01 06:54:11 crc kubenswrapper[4822]: I1201 06:54:11.291995 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tv5ck" Dec 01 06:54:11 crc kubenswrapper[4822]: I1201 06:54:11.614916 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-marketplace-tv5ck" Dec 01 06:54:11 crc kubenswrapper[4822]: I1201 06:54:11.637001 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ghqm7" Dec 01 06:54:11 crc kubenswrapper[4822]: I1201 06:54:11.637087 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ghqm7" Dec 01 06:54:11 crc kubenswrapper[4822]: I1201 06:54:11.735076 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ghqm7" Dec 01 06:54:12 crc kubenswrapper[4822]: I1201 06:54:12.452151 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" podUID="fffc4838-3f04-4867-b948-b40f642203de" containerName="oauth-openshift" containerID="cri-o://c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca" gracePeriod=15 Dec 01 06:54:12 crc kubenswrapper[4822]: I1201 06:54:12.543313 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:54:12 crc kubenswrapper[4822]: I1201 06:54:12.543391 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 06:54:12 crc kubenswrapper[4822]: I1201 06:54:12.543445 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:54:12 crc kubenswrapper[4822]: I1201 06:54:12.544258 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 06:54:12 crc kubenswrapper[4822]: I1201 06:54:12.544334 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3" gracePeriod=600 Dec 01 06:54:12 crc kubenswrapper[4822]: I1201 06:54:12.631365 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ghqm7" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.388351 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487261 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fffc4838-3f04-4867-b948-b40f642203de-audit-dir\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487330 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-serving-cert\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487382 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-provider-selection\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487409 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-cliconfig\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487398 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fffc4838-3f04-4867-b948-b40f642203de-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487442 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-ocp-branding-template\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487606 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-trusted-ca-bundle\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487651 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-service-ca\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487694 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-router-certs\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487747 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-audit-policies\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487796 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-idp-0-file-data\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487834 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4k2f\" (UniqueName: \"kubernetes.io/projected/fffc4838-3f04-4867-b948-b40f642203de-kube-api-access-f4k2f\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487896 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-session\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.487933 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-login\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 
06:54:13.487983 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-error\") pod \"fffc4838-3f04-4867-b948-b40f642203de\" (UID: \"fffc4838-3f04-4867-b948-b40f642203de\") " Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.488681 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.488730 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.488838 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.488987 4822 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fffc4838-3f04-4867-b948-b40f642203de-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.489014 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.489031 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.489045 4822 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.490059 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.494874 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fffc4838-3f04-4867-b948-b40f642203de-kube-api-access-f4k2f" (OuterVolumeSpecName: "kube-api-access-f4k2f") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "kube-api-access-f4k2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.494911 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.495337 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.496022 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.496641 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.497630 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.498382 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.501848 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.504185 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "fffc4838-3f04-4867-b948-b40f642203de" (UID: "fffc4838-3f04-4867-b948-b40f642203de"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.580977 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3" exitCode=0 Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.581066 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3"} Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.581108 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"722f940bf0dc20661a5ffefc04398ee7a3b154f90f89e6da1487f7220d930232"} Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.584366 4822 generic.go:334] "Generic (PLEG): container finished" podID="fffc4838-3f04-4867-b948-b40f642203de" containerID="c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca" exitCode=0 Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.584450 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" event={"ID":"fffc4838-3f04-4867-b948-b40f642203de","Type":"ContainerDied","Data":"c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca"} Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.584525 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.584737 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-jq95c" event={"ID":"fffc4838-3f04-4867-b948-b40f642203de","Type":"ContainerDied","Data":"5753a5b7cb602e932561a7e7c5795c49d3018f9a179d71c6663b4ccb0b3898b2"} Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.584778 4822 scope.go:117] "RemoveContainer" containerID="c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.590777 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.590833 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.590851 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.590866 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.590880 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4k2f\" (UniqueName: \"kubernetes.io/projected/fffc4838-3f04-4867-b948-b40f642203de-kube-api-access-f4k2f\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.590921 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.590934 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.590948 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.590962 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.591003 4822 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/fffc4838-3f04-4867-b948-b40f642203de-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.615816 4822 scope.go:117] "RemoveContainer" containerID="c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca" Dec 01 06:54:13 crc kubenswrapper[4822]: E1201 06:54:13.616990 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca\": container with ID starting with c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca not found: ID does not exist" containerID="c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.617085 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca"} err="failed to get container status \"c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca\": rpc error: code = NotFound desc = could not find container \"c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca\": container with ID starting with c5a436c161cf317e779ce3e25dbc8f757b9421395d905c3c897c53ea7ccdbcca not found: ID does not exist" Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.626031 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-jq95c"] Dec 01 06:54:13 crc kubenswrapper[4822]: I1201 06:54:13.637445 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-jq95c"] Dec 01 06:54:14 crc kubenswrapper[4822]: I1201 06:54:14.962889 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fffc4838-3f04-4867-b948-b40f642203de" path="/var/lib/kubelet/pods/fffc4838-3f04-4867-b948-b40f642203de/volumes" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.421949 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ghqm7"] Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.422372 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ghqm7" podUID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" containerName="registry-server" containerID="cri-o://498a1484ed2761a07c9680387478e95ba14e79d7b176a86df5bcfca9b87f0427" gracePeriod=2 Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.436406 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f"] Dec 01 06:54:15 crc kubenswrapper[4822]: E1201 06:54:15.436839 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" containerName="extract-content" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.436882 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" containerName="extract-content" Dec 01 06:54:15 crc kubenswrapper[4822]: E1201 06:54:15.436915 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" containerName="extract-utilities" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.436936 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" containerName="extract-utilities" Dec 01 06:54:15 crc kubenswrapper[4822]: 
E1201 06:54:15.436963 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" containerName="extract-content" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.436983 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" containerName="extract-content" Dec 01 06:54:15 crc kubenswrapper[4822]: E1201 06:54:15.437016 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" containerName="extract-utilities" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.437034 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" containerName="extract-utilities" Dec 01 06:54:15 crc kubenswrapper[4822]: E1201 06:54:15.437062 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fffc4838-3f04-4867-b948-b40f642203de" containerName="oauth-openshift" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.437079 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="fffc4838-3f04-4867-b948-b40f642203de" containerName="oauth-openshift" Dec 01 06:54:15 crc kubenswrapper[4822]: E1201 06:54:15.437097 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" containerName="registry-server" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.437114 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" containerName="registry-server" Dec 01 06:54:15 crc kubenswrapper[4822]: E1201 06:54:15.437145 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" containerName="registry-server" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.437161 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" containerName="registry-server" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.437395 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="fffc4838-3f04-4867-b948-b40f642203de" containerName="oauth-openshift" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.437432 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eaba10b-a8b4-4079-a55c-0807ca25b2e4" containerName="registry-server" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.437468 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbbcf3ab-f810-4bb6-b401-f9365b66b6e9" containerName="registry-server" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.438426 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.444323 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.444628 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.444755 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.444912 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.445172 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.453640 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.454336 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.456055 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.456965 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.457193 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.457347 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.461965 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.474760 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.477785 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.490005 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f"] Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.500012 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518193 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " 
pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518284 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518328 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518374 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjg7m\" (UniqueName: \"kubernetes.io/projected/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-kube-api-access-kjg7m\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518435 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-router-certs\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518472 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-audit-dir\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518522 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-audit-policies\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518623 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-service-ca\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518691 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-session\") pod 
\"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518744 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-user-template-error\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518784 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518835 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518873 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.518918 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-user-template-login\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.605077 4822 generic.go:334] "Generic (PLEG): container finished" podID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" containerID="498a1484ed2761a07c9680387478e95ba14e79d7b176a86df5bcfca9b87f0427" exitCode=0 Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.605143 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ghqm7" event={"ID":"d0d1fe31-3d9d-4422-9b5b-d73653be78c8","Type":"ContainerDied","Data":"498a1484ed2761a07c9680387478e95ba14e79d7b176a86df5bcfca9b87f0427"} Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.620247 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.620879 
4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.620917 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.620947 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjg7m\" (UniqueName: \"kubernetes.io/projected/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-kube-api-access-kjg7m\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.620982 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-router-certs\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.621003 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-audit-dir\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.621033 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-audit-policies\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.621065 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-service-ca\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.621089 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-session\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.621111 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-user-template-error\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.621141 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.621175 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.621195 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.621223 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-user-template-login\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.622174 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-service-ca\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.622271 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-audit-dir\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.622894 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-audit-policies\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.624687 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.626445 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.627178 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-user-template-login\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.627210 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.628347 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.628449 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.628873 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-router-certs\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.635464 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-user-template-error\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.635515 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-session\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.635974 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.639877 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjg7m\" (UniqueName: \"kubernetes.io/projected/85febf68-c5d6-46cd-b2d3-b1b495e69ed9-kube-api-access-kjg7m\") pod \"oauth-openshift-6c5fcdcf5-8gp4f\" (UID: \"85febf68-c5d6-46cd-b2d3-b1b495e69ed9\") " pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.787308 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ghqm7" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.823888 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdbcr\" (UniqueName: \"kubernetes.io/projected/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-kube-api-access-mdbcr\") pod \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\" (UID: \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\") " Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.823982 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-catalog-content\") pod \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\" (UID: \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\") " Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.824041 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-utilities\") pod \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\" (UID: \"d0d1fe31-3d9d-4422-9b5b-d73653be78c8\") " Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.824995 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-utilities" (OuterVolumeSpecName: "utilities") pod "d0d1fe31-3d9d-4422-9b5b-d73653be78c8" (UID: "d0d1fe31-3d9d-4422-9b5b-d73653be78c8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.827006 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-kube-api-access-mdbcr" (OuterVolumeSpecName: "kube-api-access-mdbcr") pod "d0d1fe31-3d9d-4422-9b5b-d73653be78c8" (UID: "d0d1fe31-3d9d-4422-9b5b-d73653be78c8"). InnerVolumeSpecName "kube-api-access-mdbcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.832673 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.840860 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d0d1fe31-3d9d-4422-9b5b-d73653be78c8" (UID: "d0d1fe31-3d9d-4422-9b5b-d73653be78c8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.926092 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdbcr\" (UniqueName: \"kubernetes.io/projected/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-kube-api-access-mdbcr\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.926133 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:15 crc kubenswrapper[4822]: I1201 06:54:15.926146 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0d1fe31-3d9d-4422-9b5b-d73653be78c8-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:16 crc kubenswrapper[4822]: I1201 06:54:16.225759 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f"] Dec 01 06:54:16 crc kubenswrapper[4822]: W1201 06:54:16.231113 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85febf68_c5d6_46cd_b2d3_b1b495e69ed9.slice/crio-617fee6603abbe1d05a0d24971b8c2ecd4f6951702a57bace0728c8697bda152 WatchSource:0}: Error finding container 617fee6603abbe1d05a0d24971b8c2ecd4f6951702a57bace0728c8697bda152: Status 404 returned error can't find the container with id 617fee6603abbe1d05a0d24971b8c2ecd4f6951702a57bace0728c8697bda152 Dec 01 06:54:16 crc kubenswrapper[4822]: I1201 06:54:16.621419 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ghqm7" event={"ID":"d0d1fe31-3d9d-4422-9b5b-d73653be78c8","Type":"ContainerDied","Data":"9c70fca2c35a7113a9096ab928a2727a5d7d9cac154660c6f5edcdd219ec52b2"} Dec 01 06:54:16 crc kubenswrapper[4822]: I1201 06:54:16.621510 4822 scope.go:117] "RemoveContainer" containerID="498a1484ed2761a07c9680387478e95ba14e79d7b176a86df5bcfca9b87f0427" Dec 01 06:54:16 crc kubenswrapper[4822]: I1201 06:54:16.621763 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ghqm7" Dec 01 06:54:16 crc kubenswrapper[4822]: I1201 06:54:16.626527 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" event={"ID":"85febf68-c5d6-46cd-b2d3-b1b495e69ed9","Type":"ContainerStarted","Data":"617fee6603abbe1d05a0d24971b8c2ecd4f6951702a57bace0728c8697bda152"} Dec 01 06:54:16 crc kubenswrapper[4822]: I1201 06:54:16.643921 4822 scope.go:117] "RemoveContainer" containerID="269b55bcc3195d64d861281f320247ae0cf4295405bd1681738a6e79d7359e7d" Dec 01 06:54:16 crc kubenswrapper[4822]: I1201 06:54:16.675112 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ghqm7"] Dec 01 06:54:16 crc kubenswrapper[4822]: I1201 06:54:16.682139 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ghqm7"] Dec 01 06:54:16 crc kubenswrapper[4822]: I1201 06:54:16.693182 4822 scope.go:117] "RemoveContainer" containerID="b322e5672e69c572b77dc69834d652fd90053e0a7003f1b89533525f1c618052" Dec 01 06:54:16 crc kubenswrapper[4822]: I1201 06:54:16.959404 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" path="/var/lib/kubelet/pods/d0d1fe31-3d9d-4422-9b5b-d73653be78c8/volumes" Dec 01 06:54:17 crc kubenswrapper[4822]: I1201 06:54:17.636688 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" event={"ID":"85febf68-c5d6-46cd-b2d3-b1b495e69ed9","Type":"ContainerStarted","Data":"cfa1128633f0e82ee924ba02657d2fa604d7ab8cdbfcacc4a9fd8047714baf49"} Dec 01 06:54:17 crc kubenswrapper[4822]: I1201 06:54:17.637016 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:17 crc kubenswrapper[4822]: I1201 06:54:17.645016 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" Dec 01 06:54:17 crc kubenswrapper[4822]: I1201 06:54:17.669149 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6c5fcdcf5-8gp4f" podStartSLOduration=30.669117744 podStartE2EDuration="30.669117744s" podCreationTimestamp="2025-12-01 06:53:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:54:17.663363226 +0000 UTC m=+212.984170962" watchObservedRunningTime="2025-12-01 06:54:17.669117744 +0000 UTC m=+212.989925420" Dec 01 06:54:19 crc kubenswrapper[4822]: I1201 06:54:19.670477 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:54:20 crc kubenswrapper[4822]: I1201 06:54:20.047340 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:54:20 crc kubenswrapper[4822]: I1201 06:54:20.222036 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2tczn"] Dec 01 06:54:20 crc kubenswrapper[4822]: I1201 06:54:20.656649 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2tczn" podUID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" containerName="registry-server" 
containerID="cri-o://44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573" gracePeriod=2 Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.053529 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.124893 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rf4s4\" (UniqueName: \"kubernetes.io/projected/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-kube-api-access-rf4s4\") pod \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\" (UID: \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\") " Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.125172 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-catalog-content\") pod \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\" (UID: \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\") " Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.125317 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-utilities\") pod \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\" (UID: \"05a1348c-7ab2-414b-a0ab-2e48808a2d4d\") " Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.126766 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-utilities" (OuterVolumeSpecName: "utilities") pod "05a1348c-7ab2-414b-a0ab-2e48808a2d4d" (UID: "05a1348c-7ab2-414b-a0ab-2e48808a2d4d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.134747 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-kube-api-access-rf4s4" (OuterVolumeSpecName: "kube-api-access-rf4s4") pod "05a1348c-7ab2-414b-a0ab-2e48808a2d4d" (UID: "05a1348c-7ab2-414b-a0ab-2e48808a2d4d"). InnerVolumeSpecName "kube-api-access-rf4s4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.175414 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "05a1348c-7ab2-414b-a0ab-2e48808a2d4d" (UID: "05a1348c-7ab2-414b-a0ab-2e48808a2d4d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.227703 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rf4s4\" (UniqueName: \"kubernetes.io/projected/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-kube-api-access-rf4s4\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.227781 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.227803 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05a1348c-7ab2-414b-a0ab-2e48808a2d4d-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.669475 4822 generic.go:334] "Generic (PLEG): container finished" podID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" containerID="44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573" exitCode=0 Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.669637 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2tczn" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.669679 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tczn" event={"ID":"05a1348c-7ab2-414b-a0ab-2e48808a2d4d","Type":"ContainerDied","Data":"44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573"} Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.669860 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tczn" event={"ID":"05a1348c-7ab2-414b-a0ab-2e48808a2d4d","Type":"ContainerDied","Data":"40039e8377920226b1ad8d27fb9558cc8393c45a4a44a3951f932f344e03226e"} Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.669964 4822 scope.go:117] "RemoveContainer" containerID="44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.716383 4822 scope.go:117] "RemoveContainer" containerID="59d93be216e996edd5b903659a12c3f13e17f6ef4ef9a298caf196cc27c8ab1b" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.732212 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2tczn"] Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.736474 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2tczn"] Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.758748 4822 scope.go:117] "RemoveContainer" containerID="88fc4808afc676bdf3136aa1b6e6fe9c8a9b89885abbdfb3f66af51cb413ca5c" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.780936 4822 scope.go:117] "RemoveContainer" containerID="44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573" Dec 01 06:54:21 crc kubenswrapper[4822]: E1201 06:54:21.781947 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573\": container with ID starting with 44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573 not found: ID does not exist" containerID="44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.782010 
4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573"} err="failed to get container status \"44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573\": rpc error: code = NotFound desc = could not find container \"44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573\": container with ID starting with 44e46e4b9749091a43e57065306e4a9392f345f350a6eeab1e1df55aa5f53573 not found: ID does not exist" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.782048 4822 scope.go:117] "RemoveContainer" containerID="59d93be216e996edd5b903659a12c3f13e17f6ef4ef9a298caf196cc27c8ab1b" Dec 01 06:54:21 crc kubenswrapper[4822]: E1201 06:54:21.782743 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59d93be216e996edd5b903659a12c3f13e17f6ef4ef9a298caf196cc27c8ab1b\": container with ID starting with 59d93be216e996edd5b903659a12c3f13e17f6ef4ef9a298caf196cc27c8ab1b not found: ID does not exist" containerID="59d93be216e996edd5b903659a12c3f13e17f6ef4ef9a298caf196cc27c8ab1b" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.782809 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59d93be216e996edd5b903659a12c3f13e17f6ef4ef9a298caf196cc27c8ab1b"} err="failed to get container status \"59d93be216e996edd5b903659a12c3f13e17f6ef4ef9a298caf196cc27c8ab1b\": rpc error: code = NotFound desc = could not find container \"59d93be216e996edd5b903659a12c3f13e17f6ef4ef9a298caf196cc27c8ab1b\": container with ID starting with 59d93be216e996edd5b903659a12c3f13e17f6ef4ef9a298caf196cc27c8ab1b not found: ID does not exist" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.782861 4822 scope.go:117] "RemoveContainer" containerID="88fc4808afc676bdf3136aa1b6e6fe9c8a9b89885abbdfb3f66af51cb413ca5c" Dec 01 06:54:21 crc kubenswrapper[4822]: E1201 06:54:21.783888 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88fc4808afc676bdf3136aa1b6e6fe9c8a9b89885abbdfb3f66af51cb413ca5c\": container with ID starting with 88fc4808afc676bdf3136aa1b6e6fe9c8a9b89885abbdfb3f66af51cb413ca5c not found: ID does not exist" containerID="88fc4808afc676bdf3136aa1b6e6fe9c8a9b89885abbdfb3f66af51cb413ca5c" Dec 01 06:54:21 crc kubenswrapper[4822]: I1201 06:54:21.783924 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88fc4808afc676bdf3136aa1b6e6fe9c8a9b89885abbdfb3f66af51cb413ca5c"} err="failed to get container status \"88fc4808afc676bdf3136aa1b6e6fe9c8a9b89885abbdfb3f66af51cb413ca5c\": rpc error: code = NotFound desc = could not find container \"88fc4808afc676bdf3136aa1b6e6fe9c8a9b89885abbdfb3f66af51cb413ca5c\": container with ID starting with 88fc4808afc676bdf3136aa1b6e6fe9c8a9b89885abbdfb3f66af51cb413ca5c not found: ID does not exist" Dec 01 06:54:22 crc kubenswrapper[4822]: I1201 06:54:22.959503 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" path="/var/lib/kubelet/pods/05a1348c-7ab2-414b-a0ab-2e48808a2d4d/volumes" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.226915 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kwtsp"] Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.228693 4822 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openshift-marketplace/certified-operators-kwtsp" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" containerName="registry-server" containerID="cri-o://ee1916eaa5c200d71e32b7a4cfc966a3a97b6ca6a9ae2c9c070ba1176f4cd358" gracePeriod=30 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.240865 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jrmvv"] Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.241494 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jrmvv" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" containerName="registry-server" containerID="cri-o://df6b8210f80f3c995a39421f97443e6b7fc4716fb346ffee5aa6af3601c32e73" gracePeriod=30 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.246899 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5c4b8"] Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.247988 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" containerName="marketplace-operator" containerID="cri-o://e1d198ad55d4d95796d0e2c18424e17e7cf4654042df800a3761692ce7934e25" gracePeriod=30 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.279556 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fltjl"] Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.280182 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" containerName="extract-content" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.280214 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" containerName="extract-content" Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.280239 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" containerName="extract-utilities" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.280249 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" containerName="extract-utilities" Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.280258 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" containerName="extract-utilities" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.280265 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" containerName="extract-utilities" Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.280282 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" containerName="registry-server" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.280289 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" containerName="registry-server" Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.280305 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" containerName="extract-content" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.280315 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" containerName="extract-content" Dec 01 06:54:37 
crc kubenswrapper[4822]: E1201 06:54:37.280331 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" containerName="registry-server" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.280339 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" containerName="registry-server" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.280528 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0d1fe31-3d9d-4422-9b5b-d73653be78c8" containerName="registry-server" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.280563 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="05a1348c-7ab2-414b-a0ab-2e48808a2d4d" containerName="registry-server" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.281156 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.291091 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tv5ck"] Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.291432 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tv5ck" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" containerName="registry-server" containerID="cri-o://4a1f800c3a1ccfd1c33813cac10bc6658ec0b3fdf8ca131d794acbdb4f8381cb" gracePeriod=30 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.295805 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fltjl"] Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.328806 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nbmgc"] Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.329365 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nbmgc" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" containerName="registry-server" containerID="cri-o://54d15f111dca7ffbab3597e812bb17c250186ec9ea700cc8aaa3bfa8d317dac6" gracePeriod=30 Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.353654 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee211f87_9d8b_4a63_9d2e_7f2b7aa54ecd.slice/crio-df6b8210f80f3c995a39421f97443e6b7fc4716fb346ffee5aa6af3601c32e73.scope\": RecentStats: unable to find data in memory cache]" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.369590 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fltjl\" (UID: \"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74\") " pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.370084 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9m6r\" (UniqueName: \"kubernetes.io/projected/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74-kube-api-access-m9m6r\") pod \"marketplace-operator-79b997595-fltjl\" (UID: \"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.370116 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fltjl\" (UID: \"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74\") " pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.471316 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fltjl\" (UID: \"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74\") " pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.471433 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fltjl\" (UID: \"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74\") " pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.471509 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9m6r\" (UniqueName: \"kubernetes.io/projected/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74-kube-api-access-m9m6r\") pod \"marketplace-operator-79b997595-fltjl\" (UID: \"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74\") " pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.475424 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fltjl\" (UID: \"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74\") " pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.480140 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fltjl\" (UID: \"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74\") " pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.494038 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9m6r\" (UniqueName: \"kubernetes.io/projected/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74-kube-api-access-m9m6r\") pod \"marketplace-operator-79b997595-fltjl\" (UID: \"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74\") " pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.544630 4822 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.545154 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" 
containerID="cri-o://091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31" gracePeriod=15 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.545336 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e" gracePeriod=15 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.545397 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d" gracePeriod=15 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.545512 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11" gracePeriod=15 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.545534 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d" gracePeriod=15 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550135 4822 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.550465 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550483 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.550498 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550505 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.550517 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550525 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.550543 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550552 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.550560 4822 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550565 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.550597 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550607 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.550620 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550628 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550806 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550821 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550834 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550842 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550852 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.550862 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.553856 4822 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.554455 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.560756 4822 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.616626 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.674378 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.675005 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.675034 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.675073 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.675102 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.675258 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.675316 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.675356 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: E1201 06:54:37.733966 4822 kubelet.go:1929] "Failed creating a mirror pod for" err="Post 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.212:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.776491 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.776539 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.776648 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.776686 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.776710 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.776739 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.776786 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.776811 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.776890 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.776933 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.776956 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.776979 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.777006 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.777040 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.777079 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.777106 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.792445 4822 generic.go:334] "Generic (PLEG): container finished" podID="a693d2fd-8db7-4699-ad03-e70175ead53c" containerID="ee1916eaa5c200d71e32b7a4cfc966a3a97b6ca6a9ae2c9c070ba1176f4cd358" exitCode=0 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.792794 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kwtsp" event={"ID":"a693d2fd-8db7-4699-ad03-e70175ead53c","Type":"ContainerDied","Data":"ee1916eaa5c200d71e32b7a4cfc966a3a97b6ca6a9ae2c9c070ba1176f4cd358"} Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.800109 4822 generic.go:334] "Generic (PLEG): 
container finished" podID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" containerID="df6b8210f80f3c995a39421f97443e6b7fc4716fb346ffee5aa6af3601c32e73" exitCode=0 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.800149 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jrmvv" event={"ID":"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd","Type":"ContainerDied","Data":"df6b8210f80f3c995a39421f97443e6b7fc4716fb346ffee5aa6af3601c32e73"} Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.804161 4822 generic.go:334] "Generic (PLEG): container finished" podID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" containerID="54d15f111dca7ffbab3597e812bb17c250186ec9ea700cc8aaa3bfa8d317dac6" exitCode=0 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.804217 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmgc" event={"ID":"c43bed42-ab14-48f1-a79d-dca61de1b6f1","Type":"ContainerDied","Data":"54d15f111dca7ffbab3597e812bb17c250186ec9ea700cc8aaa3bfa8d317dac6"} Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.806743 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.808508 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.811847 4822 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d" exitCode=0 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.811883 4822 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e" exitCode=0 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.811895 4822 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d" exitCode=0 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.811903 4822 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11" exitCode=2 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.812175 4822 scope.go:117] "RemoveContainer" containerID="dba05fb84c38363f855d6ebc993e7afff0b49fab83059ad42e77e9e47f688fbf" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.816530 4822 generic.go:334] "Generic (PLEG): container finished" podID="5485f8d6-493f-4b42-88a6-363043c13a90" containerID="e1d198ad55d4d95796d0e2c18424e17e7cf4654042df800a3761692ce7934e25" exitCode=0 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.816625 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" event={"ID":"5485f8d6-493f-4b42-88a6-363043c13a90","Type":"ContainerDied","Data":"e1d198ad55d4d95796d0e2c18424e17e7cf4654042df800a3761692ce7934e25"} Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.821717 4822 generic.go:334] "Generic (PLEG): container finished" podID="d9639beb-e6eb-42eb-9937-1b89a6be6100" containerID="4a1f800c3a1ccfd1c33813cac10bc6658ec0b3fdf8ca131d794acbdb4f8381cb" 
exitCode=0 Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.821775 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tv5ck" event={"ID":"d9639beb-e6eb-42eb-9937-1b89a6be6100","Type":"ContainerDied","Data":"4a1f800c3a1ccfd1c33813cac10bc6658ec0b3fdf8ca131d794acbdb4f8381cb"} Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.863934 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbmgc" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.864796 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.871082 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tv5ck" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.872148 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.872563 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.982255 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c43bed42-ab14-48f1-a79d-dca61de1b6f1-catalog-content\") pod \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\" (UID: \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\") " Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.982316 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c43bed42-ab14-48f1-a79d-dca61de1b6f1-utilities\") pod \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\" (UID: \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\") " Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.982339 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9639beb-e6eb-42eb-9937-1b89a6be6100-utilities\") pod \"d9639beb-e6eb-42eb-9937-1b89a6be6100\" (UID: \"d9639beb-e6eb-42eb-9937-1b89a6be6100\") " Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.982373 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsjhx\" (UniqueName: \"kubernetes.io/projected/c43bed42-ab14-48f1-a79d-dca61de1b6f1-kube-api-access-wsjhx\") pod \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\" (UID: \"c43bed42-ab14-48f1-a79d-dca61de1b6f1\") " Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.982481 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdrs2\" (UniqueName: 
\"kubernetes.io/projected/d9639beb-e6eb-42eb-9937-1b89a6be6100-kube-api-access-pdrs2\") pod \"d9639beb-e6eb-42eb-9937-1b89a6be6100\" (UID: \"d9639beb-e6eb-42eb-9937-1b89a6be6100\") " Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.982551 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9639beb-e6eb-42eb-9937-1b89a6be6100-catalog-content\") pod \"d9639beb-e6eb-42eb-9937-1b89a6be6100\" (UID: \"d9639beb-e6eb-42eb-9937-1b89a6be6100\") " Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.984206 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9639beb-e6eb-42eb-9937-1b89a6be6100-utilities" (OuterVolumeSpecName: "utilities") pod "d9639beb-e6eb-42eb-9937-1b89a6be6100" (UID: "d9639beb-e6eb-42eb-9937-1b89a6be6100"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.987174 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c43bed42-ab14-48f1-a79d-dca61de1b6f1-utilities" (OuterVolumeSpecName: "utilities") pod "c43bed42-ab14-48f1-a79d-dca61de1b6f1" (UID: "c43bed42-ab14-48f1-a79d-dca61de1b6f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.990827 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9639beb-e6eb-42eb-9937-1b89a6be6100-kube-api-access-pdrs2" (OuterVolumeSpecName: "kube-api-access-pdrs2") pod "d9639beb-e6eb-42eb-9937-1b89a6be6100" (UID: "d9639beb-e6eb-42eb-9937-1b89a6be6100"). InnerVolumeSpecName "kube-api-access-pdrs2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:54:37 crc kubenswrapper[4822]: I1201 06:54:37.991301 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c43bed42-ab14-48f1-a79d-dca61de1b6f1-kube-api-access-wsjhx" (OuterVolumeSpecName: "kube-api-access-wsjhx") pod "c43bed42-ab14-48f1-a79d-dca61de1b6f1" (UID: "c43bed42-ab14-48f1-a79d-dca61de1b6f1"). InnerVolumeSpecName "kube-api-access-wsjhx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.001247 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9639beb-e6eb-42eb-9937-1b89a6be6100-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9639beb-e6eb-42eb-9937-1b89a6be6100" (UID: "d9639beb-e6eb-42eb-9937-1b89a6be6100"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.038991 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.084223 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c43bed42-ab14-48f1-a79d-dca61de1b6f1-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.084263 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9639beb-e6eb-42eb-9937-1b89a6be6100-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.084280 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsjhx\" (UniqueName: \"kubernetes.io/projected/c43bed42-ab14-48f1-a79d-dca61de1b6f1-kube-api-access-wsjhx\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.084299 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdrs2\" (UniqueName: \"kubernetes.io/projected/d9639beb-e6eb-42eb-9937-1b89a6be6100-kube-api-access-pdrs2\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.084344 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9639beb-e6eb-42eb-9937-1b89a6be6100-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: E1201 06:54:38.088306 4822 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.212:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d04f4b6fb9381 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 06:54:38.087574401 +0000 UTC m=+233.408382097,LastTimestamp:2025-12-01 06:54:38.087574401 +0000 UTC m=+233.408382097,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.112051 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c43bed42-ab14-48f1-a79d-dca61de1b6f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c43bed42-ab14-48f1-a79d-dca61de1b6f1" (UID: "c43bed42-ab14-48f1-a79d-dca61de1b6f1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.185538 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c43bed42-ab14-48f1-a79d-dca61de1b6f1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.200608 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.201550 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.202162 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.202785 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.205848 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.206901 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.207325 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.207718 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.207993 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.215106 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.216857 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.217470 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.217992 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.218217 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.218428 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.286959 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc49q\" (UniqueName: \"kubernetes.io/projected/5485f8d6-493f-4b42-88a6-363043c13a90-kube-api-access-fc49q\") pod \"5485f8d6-493f-4b42-88a6-363043c13a90\" (UID: \"5485f8d6-493f-4b42-88a6-363043c13a90\") " Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.287027 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvwh2\" (UniqueName: \"kubernetes.io/projected/a693d2fd-8db7-4699-ad03-e70175ead53c-kube-api-access-bvwh2\") pod \"a693d2fd-8db7-4699-ad03-e70175ead53c\" (UID: \"a693d2fd-8db7-4699-ad03-e70175ead53c\") " Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.287110 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5485f8d6-493f-4b42-88a6-363043c13a90-marketplace-trusted-ca\") pod \"5485f8d6-493f-4b42-88a6-363043c13a90\" (UID: \"5485f8d6-493f-4b42-88a6-363043c13a90\") " Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.287145 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a693d2fd-8db7-4699-ad03-e70175ead53c-utilities\") pod \"a693d2fd-8db7-4699-ad03-e70175ead53c\" (UID: \"a693d2fd-8db7-4699-ad03-e70175ead53c\") " Dec 01 06:54:38 crc 
kubenswrapper[4822]: I1201 06:54:38.287193 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a693d2fd-8db7-4699-ad03-e70175ead53c-catalog-content\") pod \"a693d2fd-8db7-4699-ad03-e70175ead53c\" (UID: \"a693d2fd-8db7-4699-ad03-e70175ead53c\") " Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.287224 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mx488\" (UniqueName: \"kubernetes.io/projected/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-kube-api-access-mx488\") pod \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\" (UID: \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\") " Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.287279 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-catalog-content\") pod \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\" (UID: \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\") " Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.287300 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5485f8d6-493f-4b42-88a6-363043c13a90-marketplace-operator-metrics\") pod \"5485f8d6-493f-4b42-88a6-363043c13a90\" (UID: \"5485f8d6-493f-4b42-88a6-363043c13a90\") " Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.287317 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-utilities\") pod \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\" (UID: \"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd\") " Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.288457 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a693d2fd-8db7-4699-ad03-e70175ead53c-utilities" (OuterVolumeSpecName: "utilities") pod "a693d2fd-8db7-4699-ad03-e70175ead53c" (UID: "a693d2fd-8db7-4699-ad03-e70175ead53c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.289061 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5485f8d6-493f-4b42-88a6-363043c13a90-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "5485f8d6-493f-4b42-88a6-363043c13a90" (UID: "5485f8d6-493f-4b42-88a6-363043c13a90"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.289084 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-utilities" (OuterVolumeSpecName: "utilities") pod "ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" (UID: "ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.292751 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5485f8d6-493f-4b42-88a6-363043c13a90-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "5485f8d6-493f-4b42-88a6-363043c13a90" (UID: "5485f8d6-493f-4b42-88a6-363043c13a90"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.295774 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-kube-api-access-mx488" (OuterVolumeSpecName: "kube-api-access-mx488") pod "ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" (UID: "ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd"). InnerVolumeSpecName "kube-api-access-mx488". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.310995 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a693d2fd-8db7-4699-ad03-e70175ead53c-kube-api-access-bvwh2" (OuterVolumeSpecName: "kube-api-access-bvwh2") pod "a693d2fd-8db7-4699-ad03-e70175ead53c" (UID: "a693d2fd-8db7-4699-ad03-e70175ead53c"). InnerVolumeSpecName "kube-api-access-bvwh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.318946 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5485f8d6-493f-4b42-88a6-363043c13a90-kube-api-access-fc49q" (OuterVolumeSpecName: "kube-api-access-fc49q") pod "5485f8d6-493f-4b42-88a6-363043c13a90" (UID: "5485f8d6-493f-4b42-88a6-363043c13a90"). InnerVolumeSpecName "kube-api-access-fc49q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.379301 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a693d2fd-8db7-4699-ad03-e70175ead53c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a693d2fd-8db7-4699-ad03-e70175ead53c" (UID: "a693d2fd-8db7-4699-ad03-e70175ead53c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.383126 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" (UID: "ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.389447 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc49q\" (UniqueName: \"kubernetes.io/projected/5485f8d6-493f-4b42-88a6-363043c13a90-kube-api-access-fc49q\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.389499 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvwh2\" (UniqueName: \"kubernetes.io/projected/a693d2fd-8db7-4699-ad03-e70175ead53c-kube-api-access-bvwh2\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.389511 4822 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5485f8d6-493f-4b42-88a6-363043c13a90-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.389524 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a693d2fd-8db7-4699-ad03-e70175ead53c-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.389538 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a693d2fd-8db7-4699-ad03-e70175ead53c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.389548 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mx488\" (UniqueName: \"kubernetes.io/projected/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-kube-api-access-mx488\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.389589 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.389601 4822 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5485f8d6-493f-4b42-88a6-363043c13a90-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.389612 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:38 crc kubenswrapper[4822]: E1201 06:54:38.394919 4822 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 01 06:54:38 crc kubenswrapper[4822]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-fltjl_openshift-marketplace_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74_0(edd40dd319fc9d52c84278d63d08f5638de327a26eee093180d11ee77ffaf3ef): error adding pod openshift-marketplace_marketplace-operator-79b997595-fltjl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"edd40dd319fc9d52c84278d63d08f5638de327a26eee093180d11ee77ffaf3ef" Netns:"/var/run/netns/1a0ce725-a7b1-46a9-8f64-e632c362fc35" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-fltjl;K8S_POD_INFRA_CONTAINER_ID=edd40dd319fc9d52c84278d63d08f5638de327a26eee093180d11ee77ffaf3ef;K8S_POD_UID=fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-fltjl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-fltjl/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-fltjl?timeout=1m0s": dial tcp 38.102.83.212:6443: connect: connection refused Dec 01 06:54:38 crc kubenswrapper[4822]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 01 06:54:38 crc kubenswrapper[4822]: > Dec 01 06:54:38 crc kubenswrapper[4822]: E1201 06:54:38.395250 4822 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 01 06:54:38 crc kubenswrapper[4822]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-fltjl_openshift-marketplace_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74_0(edd40dd319fc9d52c84278d63d08f5638de327a26eee093180d11ee77ffaf3ef): error adding pod openshift-marketplace_marketplace-operator-79b997595-fltjl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"edd40dd319fc9d52c84278d63d08f5638de327a26eee093180d11ee77ffaf3ef" Netns:"/var/run/netns/1a0ce725-a7b1-46a9-8f64-e632c362fc35" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-fltjl;K8S_POD_INFRA_CONTAINER_ID=edd40dd319fc9d52c84278d63d08f5638de327a26eee093180d11ee77ffaf3ef;K8S_POD_UID=fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-fltjl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-fltjl/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-fltjl?timeout=1m0s": dial tcp 38.102.83.212:6443: connect: connection refused Dec 01 06:54:38 crc kubenswrapper[4822]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 01 06:54:38 crc kubenswrapper[4822]: > pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:38 crc kubenswrapper[4822]: E1201 06:54:38.395278 4822 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 01 06:54:38 crc kubenswrapper[4822]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-fltjl_openshift-marketplace_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74_0(edd40dd319fc9d52c84278d63d08f5638de327a26eee093180d11ee77ffaf3ef): error adding pod openshift-marketplace_marketplace-operator-79b997595-fltjl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"edd40dd319fc9d52c84278d63d08f5638de327a26eee093180d11ee77ffaf3ef" Netns:"/var/run/netns/1a0ce725-a7b1-46a9-8f64-e632c362fc35" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-fltjl;K8S_POD_INFRA_CONTAINER_ID=edd40dd319fc9d52c84278d63d08f5638de327a26eee093180d11ee77ffaf3ef;K8S_POD_UID=fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-fltjl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-fltjl/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-fltjl?timeout=1m0s": dial tcp 38.102.83.212:6443: connect: connection refused Dec 01 06:54:38 crc kubenswrapper[4822]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 01 06:54:38 crc kubenswrapper[4822]: > pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:38 crc kubenswrapper[4822]: E1201 06:54:38.395340 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-fltjl_openshift-marketplace(fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-fltjl_openshift-marketplace(fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-fltjl_openshift-marketplace_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74_0(edd40dd319fc9d52c84278d63d08f5638de327a26eee093180d11ee77ffaf3ef): error adding pod openshift-marketplace_marketplace-operator-79b997595-fltjl to CNI network \\\"multus-cni-network\\\": plugin 
type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"edd40dd319fc9d52c84278d63d08f5638de327a26eee093180d11ee77ffaf3ef\\\" Netns:\\\"/var/run/netns/1a0ce725-a7b1-46a9-8f64-e632c362fc35\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-fltjl;K8S_POD_INFRA_CONTAINER_ID=edd40dd319fc9d52c84278d63d08f5638de327a26eee093180d11ee77ffaf3ef;K8S_POD_UID=fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-fltjl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-fltjl/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-fltjl?timeout=1m0s\\\": dial tcp 38.102.83.212:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" podUID="fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.832295 4822 generic.go:334] "Generic (PLEG): container finished" podID="3fec124f-3cc9-4991-85b7-3a73de687907" containerID="989252238d905c784261fe06430262674d7606cf6fed841e2361925fa7ff693d" exitCode=0 Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.832440 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"3fec124f-3cc9-4991-85b7-3a73de687907","Type":"ContainerDied","Data":"989252238d905c784261fe06430262674d7606cf6fed841e2361925fa7ff693d"} Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.834021 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.834545 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.835161 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.835569 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.835939 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.836264 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.836383 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kwtsp" event={"ID":"a693d2fd-8db7-4699-ad03-e70175ead53c","Type":"ContainerDied","Data":"9b55860dde902580635464244db736952dc13cb439db030956facb88205d9d63"} Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.836458 4822 scope.go:117] "RemoveContainer" containerID="ee1916eaa5c200d71e32b7a4cfc966a3a97b6ca6a9ae2c9c070ba1176f4cd358" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.836473 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kwtsp" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.837878 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.838294 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.838645 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.838903 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.839157 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.839244 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jrmvv" event={"ID":"ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd","Type":"ContainerDied","Data":"1e060892ffe760f012bdce019da91ce0acdd27d47b04a5994958391d3e6f7035"} Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.839274 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jrmvv" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.839596 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.840313 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.840868 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.841628 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.842410 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.842908 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.843737 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.848131 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nbmgc" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.848127 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbmgc" event={"ID":"c43bed42-ab14-48f1-a79d-dca61de1b6f1","Type":"ContainerDied","Data":"dd661be7c1003195c88f67523188446b570c178503049e9bc86715f87f1a6314"} Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.849000 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.849366 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.849669 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.850450 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.852170 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.852755 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.853821 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.855635 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.856107 4822 status_manager.go:851] "Failed to get status for pod" 
podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.857030 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.857283 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.857585 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.858336 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.859459 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.859604 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" event={"ID":"5485f8d6-493f-4b42-88a6-363043c13a90","Type":"ContainerDied","Data":"8906fa032c8ac7c9ec5103b758f19b779eec1c86e855adbe539542e6867c9849"} Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.860124 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.861487 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.861888 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.862187 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.862513 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.862862 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.864231 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.865763 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 
38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.865806 4822 scope.go:117] "RemoveContainer" containerID="ccb134177235cabde5b6218ff59f511a7980092cc052143ac0c0126ec0ea8a0e" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.866280 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.866738 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.867049 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.867457 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.869160 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.870039 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.870095 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tv5ck" event={"ID":"d9639beb-e6eb-42eb-9937-1b89a6be6100","Type":"ContainerDied","Data":"04dfa112bfe9415f3beb4848d2274a5647756a55f1f70461db5399d706dabf05"} Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.870331 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tv5ck" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.870326 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.870740 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.871101 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.872064 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.872650 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.872887 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.873260 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.873713 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.874249 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.874324 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.874412 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"32877cb00d3b15443f02d477fd1f16573b2bf61aa46e032d97593140b852ca4f"} Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.874608 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"ad236f92344ab25b97050e078672bbb820884880e2cdb1534c030e351e91b71d"} Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.875059 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.875443 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.875705 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.875914 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.876113 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused" Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.876234 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl"
Dec 01 06:54:38 crc kubenswrapper[4822]: E1201 06:54:38.876296 4822 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.212:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.876325 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.876817 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.897715 4822 scope.go:117] "RemoveContainer" containerID="fc4c7079fd9e958feef23dd457e3b34dda5f60a928a137a5a5a82463a56e4321"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.944509 4822 scope.go:117] "RemoveContainer" containerID="df6b8210f80f3c995a39421f97443e6b7fc4716fb346ffee5aa6af3601c32e73"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.956862 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.957162 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.957381 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.957635 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.957885 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.958119 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.959034 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.959662 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.960274 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.960529 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.960770 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.960952 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:38 crc kubenswrapper[4822]: I1201 06:54:38.969220 4822 scope.go:117] "RemoveContainer" containerID="75ad0e68777a771a7b159e4c7213816ae513ea74cfe5392f9ef94e3d615be138"
Dec 01 06:54:39 crc kubenswrapper[4822]: I1201 06:54:39.044338 4822 scope.go:117] "RemoveContainer" containerID="e9bfbf98a7107c94ce6c8d38c9749d7415e3783b446b67f9d28b3901ba693319"
Dec 01 06:54:39 crc kubenswrapper[4822]: I1201 06:54:39.068808 4822 scope.go:117] "RemoveContainer" containerID="54d15f111dca7ffbab3597e812bb17c250186ec9ea700cc8aaa3bfa8d317dac6"
Dec 01 06:54:39 crc kubenswrapper[4822]: I1201 06:54:39.105619 4822 scope.go:117] "RemoveContainer" containerID="112cb79570516f314e5c168a479445db1a85cf86cf589435b525f2d9c6c1f864"
Dec 01 06:54:39 crc kubenswrapper[4822]: I1201 06:54:39.134473 4822 scope.go:117] "RemoveContainer" containerID="fff687c97ff61237d4d76d6629b3881d26d425c8877bc3d925676a781f9cbcc7"
Dec 01 06:54:39 crc kubenswrapper[4822]: I1201 06:54:39.158356 4822 scope.go:117] "RemoveContainer" containerID="e1d198ad55d4d95796d0e2c18424e17e7cf4654042df800a3761692ce7934e25"
Dec 01 06:54:39 crc kubenswrapper[4822]: I1201 06:54:39.175944 4822 scope.go:117] "RemoveContainer" containerID="4a1f800c3a1ccfd1c33813cac10bc6658ec0b3fdf8ca131d794acbdb4f8381cb"
Dec 01 06:54:39 crc kubenswrapper[4822]: I1201 06:54:39.193095 4822 scope.go:117] "RemoveContainer" containerID="c18c50dda977ee3b552dc70c2a4cf04172d92bb7670ccda273216a1682b8515d"
Dec 01 06:54:39 crc kubenswrapper[4822]: I1201 06:54:39.210352 4822 scope.go:117] "RemoveContainer" containerID="7beaa652fec2d1d1d2de74195c6601d1bb04000e4aec068ffc67edc2ef10830c"
Dec 01 06:54:39 crc kubenswrapper[4822]: E1201 06:54:39.354452 4822 log.go:32] "RunPodSandbox from runtime service failed" err=<
Dec 01 06:54:39 crc kubenswrapper[4822]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-fltjl_openshift-marketplace_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74_0(92c2550d5efe0e676747f8912333a0c5eaebdf3858241fc2b63c403a87fa32e2): error adding pod openshift-marketplace_marketplace-operator-79b997595-fltjl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"92c2550d5efe0e676747f8912333a0c5eaebdf3858241fc2b63c403a87fa32e2" Netns:"/var/run/netns/7dc4155f-580e-4459-bc02-205bab142662" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-fltjl;K8S_POD_INFRA_CONTAINER_ID=92c2550d5efe0e676747f8912333a0c5eaebdf3858241fc2b63c403a87fa32e2;K8S_POD_UID=fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-fltjl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-fltjl/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-fltjl?timeout=1m0s": dial tcp 38.102.83.212:6443: connect: connection refused
Dec 01 06:54:39 crc kubenswrapper[4822]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Dec 01 06:54:39 crc kubenswrapper[4822]: >
Dec 01 06:54:39 crc kubenswrapper[4822]: E1201 06:54:39.354624 4822 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=<
Dec 01 06:54:39 crc kubenswrapper[4822]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-fltjl_openshift-marketplace_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74_0(92c2550d5efe0e676747f8912333a0c5eaebdf3858241fc2b63c403a87fa32e2): error adding pod openshift-marketplace_marketplace-operator-79b997595-fltjl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"92c2550d5efe0e676747f8912333a0c5eaebdf3858241fc2b63c403a87fa32e2" Netns:"/var/run/netns/7dc4155f-580e-4459-bc02-205bab142662" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-fltjl;K8S_POD_INFRA_CONTAINER_ID=92c2550d5efe0e676747f8912333a0c5eaebdf3858241fc2b63c403a87fa32e2;K8S_POD_UID=fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-fltjl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-fltjl/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-fltjl?timeout=1m0s": dial tcp 38.102.83.212:6443: connect: connection refused
Dec 01 06:54:39 crc kubenswrapper[4822]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Dec 01 06:54:39 crc kubenswrapper[4822]: > pod="openshift-marketplace/marketplace-operator-79b997595-fltjl"
Dec 01 06:54:39 crc kubenswrapper[4822]: E1201 06:54:39.354653 4822 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=<
Dec 01 06:54:39 crc kubenswrapper[4822]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-fltjl_openshift-marketplace_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74_0(92c2550d5efe0e676747f8912333a0c5eaebdf3858241fc2b63c403a87fa32e2): error adding pod openshift-marketplace_marketplace-operator-79b997595-fltjl to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"92c2550d5efe0e676747f8912333a0c5eaebdf3858241fc2b63c403a87fa32e2" Netns:"/var/run/netns/7dc4155f-580e-4459-bc02-205bab142662" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-fltjl;K8S_POD_INFRA_CONTAINER_ID=92c2550d5efe0e676747f8912333a0c5eaebdf3858241fc2b63c403a87fa32e2;K8S_POD_UID=fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-fltjl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-fltjl/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-fltjl?timeout=1m0s": dial tcp 38.102.83.212:6443: connect: connection refused
Dec 01 06:54:39 crc kubenswrapper[4822]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Dec 01 06:54:39 crc kubenswrapper[4822]: > pod="openshift-marketplace/marketplace-operator-79b997595-fltjl"
Dec 01 06:54:39 crc kubenswrapper[4822]: E1201 06:54:39.354728 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-fltjl_openshift-marketplace(fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-fltjl_openshift-marketplace(fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-fltjl_openshift-marketplace_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74_0(92c2550d5efe0e676747f8912333a0c5eaebdf3858241fc2b63c403a87fa32e2): error adding pod openshift-marketplace_marketplace-operator-79b997595-fltjl to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"92c2550d5efe0e676747f8912333a0c5eaebdf3858241fc2b63c403a87fa32e2\\\" Netns:\\\"/var/run/netns/7dc4155f-580e-4459-bc02-205bab142662\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-fltjl;K8S_POD_INFRA_CONTAINER_ID=92c2550d5efe0e676747f8912333a0c5eaebdf3858241fc2b63c403a87fa32e2;K8S_POD_UID=fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-fltjl] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-fltjl/fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-fltjl in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-fltjl?timeout=1m0s\\\": dial tcp 38.102.83.212:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" podUID="fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74"
Dec 01 06:54:39 crc kubenswrapper[4822]: E1201 06:54:39.926535 4822 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.212:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d04f4b6fb9381 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 06:54:38.087574401 +0000 UTC m=+233.408382097,LastTimestamp:2025-12-01 06:54:38.087574401 +0000 UTC m=+233.408382097,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.024584 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.025646 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.026343 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.026618 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.026916 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.027169 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.027433 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.027690 4822 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.027916 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.107401 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.108553 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.109352 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.109957 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.110382 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.110806 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.111203 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.111499 4822 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.117002 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.117097 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.117257 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.117402 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.117396 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.117447 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.117676 4822 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\""
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.117707 4822 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.218601 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3fec124f-3cc9-4991-85b7-3a73de687907-kubelet-dir\") pod \"3fec124f-3cc9-4991-85b7-3a73de687907\" (UID: \"3fec124f-3cc9-4991-85b7-3a73de687907\") "
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.218688 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fec124f-3cc9-4991-85b7-3a73de687907-kube-api-access\") pod \"3fec124f-3cc9-4991-85b7-3a73de687907\" (UID: \"3fec124f-3cc9-4991-85b7-3a73de687907\") "
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.218720 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/3fec124f-3cc9-4991-85b7-3a73de687907-var-lock\") pod \"3fec124f-3cc9-4991-85b7-3a73de687907\" (UID: \"3fec124f-3cc9-4991-85b7-3a73de687907\") "
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.218783 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fec124f-3cc9-4991-85b7-3a73de687907-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "3fec124f-3cc9-4991-85b7-3a73de687907" (UID: "3fec124f-3cc9-4991-85b7-3a73de687907"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.218894 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fec124f-3cc9-4991-85b7-3a73de687907-var-lock" (OuterVolumeSpecName: "var-lock") pod "3fec124f-3cc9-4991-85b7-3a73de687907" (UID: "3fec124f-3cc9-4991-85b7-3a73de687907"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.219129 4822 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3fec124f-3cc9-4991-85b7-3a73de687907-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.219147 4822 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/3fec124f-3cc9-4991-85b7-3a73de687907-var-lock\") on node \"crc\" DevicePath \"\""
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.219161 4822 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.226730 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fec124f-3cc9-4991-85b7-3a73de687907-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "3fec124f-3cc9-4991-85b7-3a73de687907" (UID: "3fec124f-3cc9-4991-85b7-3a73de687907"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.320131 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fec124f-3cc9-4991-85b7-3a73de687907-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.895763 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.895756 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"3fec124f-3cc9-4991-85b7-3a73de687907","Type":"ContainerDied","Data":"22cdb9238c980e04f11c15f355ff99840ca6bd8d36ba823896fffef90a9341fd"}
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.895902 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22cdb9238c980e04f11c15f355ff99840ca6bd8d36ba823896fffef90a9341fd"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.900676 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.901778 4822 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31" exitCode=0
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.901841 4822 scope.go:117] "RemoveContainer" containerID="bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.902039 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.918607 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.918938 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.919273 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.919615 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.919886 4822 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.920192 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.920877 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.924414 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.924884 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.925158 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.925519 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.926176 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.926555 4822 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.926896 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.929858 4822 scope.go:117] "RemoveContainer" containerID="f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.952344 4822 scope.go:117] "RemoveContainer" containerID="c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.966470 4822 scope.go:117] "RemoveContainer" containerID="dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.969241 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.979955 4822 scope.go:117] "RemoveContainer" containerID="091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31"
Dec 01 06:54:40 crc kubenswrapper[4822]: I1201 06:54:40.993902 4822 scope.go:117] "RemoveContainer" containerID="ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8"
Dec 01 06:54:41 crc kubenswrapper[4822]: I1201 06:54:41.437935 4822 scope.go:117] "RemoveContainer" containerID="bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d"
Dec 01 06:54:41 crc kubenswrapper[4822]: E1201 06:54:41.439846 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\": container with ID starting with bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d not found: ID does not exist" containerID="bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d"
Dec 01 06:54:41 crc kubenswrapper[4822]: I1201 06:54:41.439891 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d"} err="failed to get container status \"bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\": rpc error: code = NotFound desc = could not find container \"bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d\": container with ID starting with bb2b8d611d38d6f633f36f82ba28ad2bb2213811aba760f8328c964929481d1d not found: ID does not exist"
Dec 01 06:54:41 crc kubenswrapper[4822]: I1201 06:54:41.439925 4822 scope.go:117] "RemoveContainer" containerID="f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e"
Dec 01 06:54:41 crc kubenswrapper[4822]: E1201 06:54:41.441825 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\": container with ID starting with f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e not found: ID does not exist" containerID="f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e"
Dec 01 06:54:41 crc kubenswrapper[4822]: I1201 06:54:41.441844 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e"} err="failed to get container status \"f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\": rpc error: code = NotFound desc = could not find container \"f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e\": container with ID starting with f078e3e000042deded6146b07db1a60d525cce8b04ec3d965c78bc5b7511357e not found: ID does not exist"
Dec 01 06:54:41 crc kubenswrapper[4822]: I1201 06:54:41.441861 4822 scope.go:117] "RemoveContainer" containerID="c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d"
Dec 01 06:54:41 crc kubenswrapper[4822]: E1201 06:54:41.442965 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\": container with ID starting with c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d not found: ID does not exist" containerID="c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d"
Dec 01 06:54:41 crc kubenswrapper[4822]: I1201 06:54:41.442983 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d"} err="failed to get container status \"c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\": rpc error: code = NotFound desc = could not find container \"c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d\": container with ID starting with c078a8fb1eabc9df073db8bd252f36f833fe634095746428b44a4da00689ec8d not found: ID does not exist"
Dec 01 06:54:41 crc kubenswrapper[4822]: I1201 06:54:41.443002 4822 scope.go:117] "RemoveContainer" containerID="dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11"
Dec 01 06:54:41 crc kubenswrapper[4822]: E1201 06:54:41.443602 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\": container with ID starting with dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11 not found: ID does not exist" containerID="dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11"
Dec 01 06:54:41 crc kubenswrapper[4822]: I1201 06:54:41.443626 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11"} err="failed to get container status \"dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\": rpc error: code = NotFound desc = could not find container \"dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11\": container with ID starting with dcf45a4c995c970e0c1c4e37aaf3c39da7de11784bd1fbc5110b2b7183229e11 not found: ID does not exist"
Dec 01 06:54:41 crc kubenswrapper[4822]: I1201 06:54:41.443644 4822 scope.go:117] "RemoveContainer" containerID="091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31"
Dec 01 06:54:41 crc kubenswrapper[4822]: E1201 06:54:41.443988 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\": container with ID starting with 091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31 not found: ID does not exist" containerID="091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31"
Dec 01 06:54:41 crc kubenswrapper[4822]: I1201 06:54:41.444012 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31"} err="failed to get container status \"091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\": rpc error: code = NotFound desc = could not find container \"091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31\": container with ID starting with 091ac6f5b1e15cd3d56e9291f8bdec45c77acb9194fb43434876b438bddc4b31 not found: ID does not exist"
Dec 01 06:54:41 crc kubenswrapper[4822]: I1201 06:54:41.444033 4822 scope.go:117] "RemoveContainer" containerID="ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8"
Dec 01 06:54:41 crc kubenswrapper[4822]: E1201 06:54:41.444690 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\": container with ID starting with ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8 not found: ID does not exist" containerID="ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8"
Dec 01 06:54:41 crc kubenswrapper[4822]: I1201 06:54:41.444757 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8"} err="failed to get container status \"ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\": rpc error: code = NotFound desc = could not find container \"ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8\": container with ID starting with ce8f371ad86a4957f7ea8abd262b9ac8c1d437787a57c18fe378c60d5849a1d8 not found: ID does not exist"
Dec 01 06:54:44 crc kubenswrapper[4822]: I1201 06:54:44.955735 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:44 crc kubenswrapper[4822]: I1201 06:54:44.958927 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:44 crc kubenswrapper[4822]: I1201 06:54:44.960247 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:44 crc kubenswrapper[4822]: I1201 06:54:44.962013 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:44 crc kubenswrapper[4822]: I1201 06:54:44.962644 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:44 crc kubenswrapper[4822]: I1201 06:54:44.963361 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:47 crc kubenswrapper[4822]: E1201 06:54:47.584261 4822 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:47 crc kubenswrapper[4822]: E1201 06:54:47.585092 4822 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:47 crc kubenswrapper[4822]: E1201 06:54:47.585529 4822 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:47 crc kubenswrapper[4822]: E1201 06:54:47.586087 4822 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:47 crc kubenswrapper[4822]: E1201 06:54:47.586470 4822 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:47 crc kubenswrapper[4822]: I1201 06:54:47.586516 4822 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease"
Dec 01 06:54:47 crc kubenswrapper[4822]: E1201 06:54:47.586903 4822 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="200ms"
Dec 01 06:54:47 crc kubenswrapper[4822]: E1201 06:54:47.787790 4822 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="400ms"
Dec 01 06:54:48 crc kubenswrapper[4822]: E1201 06:54:48.188234 4822 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="800ms"
Dec 01 06:54:48 crc kubenswrapper[4822]: E1201 06:54:48.989753 4822 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="1.6s"
Dec 01 06:54:49 crc kubenswrapper[4822]: E1201 06:54:49.928711 4822 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.212:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d04f4b6fb9381 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 06:54:38.087574401 +0000 UTC m=+233.408382097,LastTimestamp:2025-12-01 06:54:38.087574401 +0000 UTC m=+233.408382097,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.950163 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.951283 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.951795 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.952246 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.952944 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.953886 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.954619 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.970523 4822 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b1519f97-7402-44a5-8979-b605e3957d76"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.970591 4822 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b1519f97-7402-44a5-8979-b605e3957d76"
Dec 01 06:54:49 crc kubenswrapper[4822]: E1201 06:54:49.971497 4822 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.212:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.972440 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.988857 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.989447 4822 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0" exitCode=1
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.989504 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0"}
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.990473 4822 scope.go:117] "RemoveContainer" containerID="78d9f3febcadca479cad03dc3b2b17d4e83cc19ce9bd218f02d7bdff617410f0"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.990800 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.991540 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.992196 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.992663 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.993068 4822 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.993423 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: I1201 06:54:49.994071 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:49 crc kubenswrapper[4822]: W1201 06:54:49.998340 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-ecff8053c6e2cdac6c6d67c62ea7262515e91a3704b1074db04b165de40d0330 WatchSource:0}: Error finding container ecff8053c6e2cdac6c6d67c62ea7262515e91a3704b1074db04b165de40d0330: Status 404 returned error can't find the container with id ecff8053c6e2cdac6c6d67c62ea7262515e91a3704b1074db04b165de40d0330
Dec 01 06:54:50 crc kubenswrapper[4822]: I1201 06:54:50.173412 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:54:50 crc kubenswrapper[4822]: E1201 06:54:50.591455 4822 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="3.2s"
Dec 01 06:54:50 crc kubenswrapper[4822]: I1201 06:54:50.998900 4822 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="7965d9cf2cc68025451e684fddc9fc1beeaa9832803ab3aa837cc3300812679d" exitCode=0
Dec 01 06:54:50 crc kubenswrapper[4822]: I1201 06:54:50.999013 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"7965d9cf2cc68025451e684fddc9fc1beeaa9832803ab3aa837cc3300812679d"}
Dec 01 06:54:50 crc kubenswrapper[4822]: I1201 06:54:50.999090 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ecff8053c6e2cdac6c6d67c62ea7262515e91a3704b1074db04b165de40d0330"}
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:50.999491 4822 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b1519f97-7402-44a5-8979-b605e3957d76"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:50.999511 4822 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b1519f97-7402-44a5-8979-b605e3957d76"
Dec 01 06:54:51 crc kubenswrapper[4822]: E1201 06:54:51.000110 4822 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.212:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.001130 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.001587 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.002233 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.003014 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.003331 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.003720 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.004282 4822 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.005219 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.005292 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c3188ba87fcf66f2e90697b6f24a9c8f65aa26a0942969a7685008f2dcd928a1"}
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.005842 4822 status_manager.go:851] "Failed to get status for pod" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" pod="openshift-marketplace/community-operators-jrmvv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-jrmvv\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.006645 4822 status_manager.go:851] "Failed to get status for pod" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" pod="openshift-marketplace/redhat-marketplace-tv5ck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-tv5ck\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.007438 4822 status_manager.go:851] "Failed to get status for pod" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.008263 4822 status_manager.go:851] "Failed to get status for pod" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" pod="openshift-marketplace/marketplace-operator-79b997595-5c4b8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5c4b8\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.008778 4822 status_manager.go:851] "Failed to get status for pod" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" pod="openshift-marketplace/certified-operators-kwtsp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kwtsp\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.009578 4822 status_manager.go:851] "Failed to get status for pod" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" pod="openshift-marketplace/redhat-operators-nbmgc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nbmgc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.010201 4822 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.212:6443: connect: connection refused"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.952056 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl"
Dec 01 06:54:51 crc kubenswrapper[4822]: I1201 06:54:51.953510 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl"
Dec 01 06:54:52 crc kubenswrapper[4822]: I1201 06:54:52.018382 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e228a3ae1151171c092711e31fc2a4184a1a54e30970c8b2a563b2eff012aaa2"}
Dec 01 06:54:52 crc kubenswrapper[4822]: I1201 06:54:52.018470 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b256b1f5a79f2eb605a02d7df0752f2544a241b767cec99952eba8e9aad953bf"}
Dec 01 06:54:52 crc kubenswrapper[4822]: I1201 06:54:52.018484 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9f7bebf45585b36b964857b4d7ee28f43bda19356e53be5d5f2b0a7bbf3a0aa3"}
Dec 01 06:54:53 crc kubenswrapper[4822]: I1201 06:54:53.029490 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"60697e04812e9468f7820db80c357899ba344eb3d2bcee57fed8128741cbd0f8"}
Dec 01 06:54:53 crc kubenswrapper[4822]: I1201 06:54:53.030386 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1c0d1c7f101a64653c5215678106ffedc98ad1ce0d332f8f35e1d10eb3e96725"}
Dec 01 06:54:53 crc kubenswrapper[4822]: I1201 06:54:53.030415 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:54:53 crc kubenswrapper[4822]: I1201 06:54:53.029857 4822 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b1519f97-7402-44a5-8979-b605e3957d76"
Dec 01 06:54:53 crc kubenswrapper[4822]: I1201 06:54:53.030438 4822 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b1519f97-7402-44a5-8979-b605e3957d76"
Dec 01 06:54:54 crc kubenswrapper[4822]: I1201 06:54:54.972595 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:54:54 crc kubenswrapper[4822]: I1201 06:54:54.972684 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:54:54 crc kubenswrapper[4822]: I1201 06:54:54.981410 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:54:55 crc kubenswrapper[4822]: I1201 06:54:55.043249 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:54:55 crc kubenswrapper[4822]: I1201 06:54:55.051604 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:54:55 crc kubenswrapper[4822]: I1201 06:54:55.058180 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:54:57 crc kubenswrapper[4822]: W1201 06:54:57.912891 4822 manager.go:1169] Failed to process watch event
{EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfabd8807_fa3c_4319_aeb8_f5f8ee2a2a74.slice/crio-a99b782516fa5b1d2eef41a7d38f25d1e1b2277afcb895cc1140b2ddd2a9cf6e WatchSource:0}: Error finding container a99b782516fa5b1d2eef41a7d38f25d1e1b2277afcb895cc1140b2ddd2a9cf6e: Status 404 returned error can't find the container with id a99b782516fa5b1d2eef41a7d38f25d1e1b2277afcb895cc1140b2ddd2a9cf6e Dec 01 06:54:58 crc kubenswrapper[4822]: I1201 06:54:58.048984 4822 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:54:58 crc kubenswrapper[4822]: I1201 06:54:58.076132 4822 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b1519f97-7402-44a5-8979-b605e3957d76" Dec 01 06:54:58 crc kubenswrapper[4822]: I1201 06:54:58.076180 4822 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b1519f97-7402-44a5-8979-b605e3957d76" Dec 01 06:54:58 crc kubenswrapper[4822]: I1201 06:54:58.076331 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" event={"ID":"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74","Type":"ContainerStarted","Data":"7197d8c165e3ed2dc15cd7d94c5f2e4ae262591aabaaadb6da94a061635c69c5"} Dec 01 06:54:58 crc kubenswrapper[4822]: I1201 06:54:58.076364 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" event={"ID":"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74","Type":"ContainerStarted","Data":"a99b782516fa5b1d2eef41a7d38f25d1e1b2277afcb895cc1140b2ddd2a9cf6e"} Dec 01 06:54:58 crc kubenswrapper[4822]: I1201 06:54:58.077431 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:54:58 crc kubenswrapper[4822]: I1201 06:54:58.080046 4822 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-fltjl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" start-of-body= Dec 01 06:54:58 crc kubenswrapper[4822]: I1201 06:54:58.080135 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" podUID="fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" Dec 01 06:54:58 crc kubenswrapper[4822]: I1201 06:54:58.081575 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:54:58 crc kubenswrapper[4822]: I1201 06:54:58.257812 4822 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="9065259a-04cf-4307-acbb-3f46cdb6a53b" Dec 01 06:54:59 crc kubenswrapper[4822]: I1201 06:54:59.085428 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fltjl_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74/marketplace-operator/0.log" Dec 01 06:54:59 crc kubenswrapper[4822]: I1201 06:54:59.085538 4822 generic.go:334] "Generic (PLEG): container finished" podID="fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" 
containerID="7197d8c165e3ed2dc15cd7d94c5f2e4ae262591aabaaadb6da94a061635c69c5" exitCode=1 Dec 01 06:54:59 crc kubenswrapper[4822]: I1201 06:54:59.085642 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" event={"ID":"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74","Type":"ContainerDied","Data":"7197d8c165e3ed2dc15cd7d94c5f2e4ae262591aabaaadb6da94a061635c69c5"} Dec 01 06:54:59 crc kubenswrapper[4822]: I1201 06:54:59.086137 4822 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b1519f97-7402-44a5-8979-b605e3957d76" Dec 01 06:54:59 crc kubenswrapper[4822]: I1201 06:54:59.086170 4822 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="b1519f97-7402-44a5-8979-b605e3957d76" Dec 01 06:54:59 crc kubenswrapper[4822]: I1201 06:54:59.086787 4822 scope.go:117] "RemoveContainer" containerID="7197d8c165e3ed2dc15cd7d94c5f2e4ae262591aabaaadb6da94a061635c69c5" Dec 01 06:54:59 crc kubenswrapper[4822]: I1201 06:54:59.113241 4822 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="9065259a-04cf-4307-acbb-3f46cdb6a53b" Dec 01 06:55:00 crc kubenswrapper[4822]: I1201 06:55:00.094822 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fltjl_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74/marketplace-operator/1.log" Dec 01 06:55:00 crc kubenswrapper[4822]: I1201 06:55:00.097412 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fltjl_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74/marketplace-operator/0.log" Dec 01 06:55:00 crc kubenswrapper[4822]: I1201 06:55:00.097591 4822 generic.go:334] "Generic (PLEG): container finished" podID="fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" containerID="6a1242f206d70afa6aa1885bc2c86d0471696fa21be267681a0f08dc69e4d574" exitCode=1 Dec 01 06:55:00 crc kubenswrapper[4822]: I1201 06:55:00.097680 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" event={"ID":"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74","Type":"ContainerDied","Data":"6a1242f206d70afa6aa1885bc2c86d0471696fa21be267681a0f08dc69e4d574"} Dec 01 06:55:00 crc kubenswrapper[4822]: I1201 06:55:00.097795 4822 scope.go:117] "RemoveContainer" containerID="7197d8c165e3ed2dc15cd7d94c5f2e4ae262591aabaaadb6da94a061635c69c5" Dec 01 06:55:00 crc kubenswrapper[4822]: I1201 06:55:00.098580 4822 scope.go:117] "RemoveContainer" containerID="6a1242f206d70afa6aa1885bc2c86d0471696fa21be267681a0f08dc69e4d574" Dec 01 06:55:00 crc kubenswrapper[4822]: E1201 06:55:00.099012 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-fltjl_openshift-marketplace(fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74)\"" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" podUID="fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" Dec 01 06:55:00 crc kubenswrapper[4822]: I1201 06:55:00.176506 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:55:01 crc kubenswrapper[4822]: I1201 06:55:01.105909 4822 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fltjl_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74/marketplace-operator/1.log" Dec 01 06:55:01 crc kubenswrapper[4822]: I1201 06:55:01.106581 4822 scope.go:117] "RemoveContainer" containerID="6a1242f206d70afa6aa1885bc2c86d0471696fa21be267681a0f08dc69e4d574" Dec 01 06:55:01 crc kubenswrapper[4822]: E1201 06:55:01.106868 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-fltjl_openshift-marketplace(fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74)\"" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" podUID="fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" Dec 01 06:55:07 crc kubenswrapper[4822]: I1201 06:55:07.617815 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:55:07 crc kubenswrapper[4822]: I1201 06:55:07.619110 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:55:07 crc kubenswrapper[4822]: I1201 06:55:07.620298 4822 scope.go:117] "RemoveContainer" containerID="6a1242f206d70afa6aa1885bc2c86d0471696fa21be267681a0f08dc69e4d574" Dec 01 06:55:07 crc kubenswrapper[4822]: E1201 06:55:07.621022 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-fltjl_openshift-marketplace(fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74)\"" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" podUID="fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74" Dec 01 06:55:08 crc kubenswrapper[4822]: I1201 06:55:08.097584 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 01 06:55:08 crc kubenswrapper[4822]: I1201 06:55:08.308488 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 01 06:55:08 crc kubenswrapper[4822]: I1201 06:55:08.308583 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 01 06:55:08 crc kubenswrapper[4822]: I1201 06:55:08.571166 4822 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 01 06:55:08 crc kubenswrapper[4822]: I1201 06:55:08.785724 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 01 06:55:08 crc kubenswrapper[4822]: I1201 06:55:08.946692 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 01 06:55:08 crc kubenswrapper[4822]: I1201 06:55:08.985117 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 06:55:09 crc kubenswrapper[4822]: I1201 06:55:09.214415 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 06:55:09 crc kubenswrapper[4822]: I1201 06:55:09.529540 4822 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 01 06:55:09 crc 
kubenswrapper[4822]: I1201 06:55:09.763938 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 01 06:55:09 crc kubenswrapper[4822]: I1201 06:55:09.816992 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 01 06:55:10 crc kubenswrapper[4822]: I1201 06:55:10.272162 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 01 06:55:10 crc kubenswrapper[4822]: I1201 06:55:10.342206 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 01 06:55:10 crc kubenswrapper[4822]: I1201 06:55:10.379522 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 01 06:55:10 crc kubenswrapper[4822]: I1201 06:55:10.779820 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.059249 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.075324 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.147202 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.199315 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.246323 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.253335 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.326113 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.415182 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.418071 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.436390 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.482861 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.539929 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.860201 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 
01 06:55:11 crc kubenswrapper[4822]: I1201 06:55:11.890171 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 01 06:55:12 crc kubenswrapper[4822]: I1201 06:55:12.192906 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 01 06:55:12 crc kubenswrapper[4822]: I1201 06:55:12.200675 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 01 06:55:12 crc kubenswrapper[4822]: I1201 06:55:12.403343 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 01 06:55:12 crc kubenswrapper[4822]: I1201 06:55:12.430451 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 06:55:12 crc kubenswrapper[4822]: I1201 06:55:12.444912 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 01 06:55:12 crc kubenswrapper[4822]: I1201 06:55:12.525416 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 01 06:55:12 crc kubenswrapper[4822]: I1201 06:55:12.539492 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 01 06:55:12 crc kubenswrapper[4822]: I1201 06:55:12.782587 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 01 06:55:12 crc kubenswrapper[4822]: I1201 06:55:12.801281 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 01 06:55:12 crc kubenswrapper[4822]: I1201 06:55:12.870311 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 01 06:55:12 crc kubenswrapper[4822]: I1201 06:55:12.982132 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.074772 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.130255 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.230505 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.268534 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.282941 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.714741 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.725075 4822 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"service-ca-bundle" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.726213 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.762826 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.813933 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.829311 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.847378 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.875166 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.889024 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.946247 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 06:55:13 crc kubenswrapper[4822]: I1201 06:55:13.977293 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.047656 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.204519 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.222176 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.225540 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.265771 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.268272 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.398307 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.399500 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.530214 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.540947 4822 reflector.go:368] Caches populated for *v1.Node from 
k8s.io/client-go/informers/factory.go:160 Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.557204 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.696735 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.810291 4822 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.896416 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.897178 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 01 06:55:14 crc kubenswrapper[4822]: I1201 06:55:14.919003 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.080916 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.112345 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.147875 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.234871 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.297770 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.304518 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.320800 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.342122 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.350171 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.361039 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.478445 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.528334 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.588209 4822 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-ingress"/"router-stats-default" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.598319 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.618145 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.634846 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.660236 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.665538 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.689651 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.702726 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.743180 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.791007 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.933381 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 01 06:55:15 crc kubenswrapper[4822]: I1201 06:55:15.995482 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.003006 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.003354 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.077730 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.115490 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.188525 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.202936 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.206423 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.223912 4822 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.258242 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.304393 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.380380 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.394833 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.461231 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.576627 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.714238 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.765795 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.807889 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.831291 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.883158 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.892033 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 01 06:55:16 crc kubenswrapper[4822]: I1201 06:55:16.998425 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.053120 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.148130 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.214909 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.321222 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.347655 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.359194 4822 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.393969 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.496491 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.616529 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.687117 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.699815 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.722257 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.724646 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.739604 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.765055 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.765819 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 01 06:55:17 crc kubenswrapper[4822]: I1201 06:55:17.816147 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.007586 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.069667 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.108714 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.199373 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.209843 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.273629 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.287328 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 01 
06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.300954 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.415504 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.438072 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.478319 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.491000 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.543497 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.570379 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.601862 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.672069 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.725288 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.744508 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.792392 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.871450 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.927046 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.931690 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 01 06:55:18 crc kubenswrapper[4822]: I1201 06:55:18.981713 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.085633 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.230252 4822 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.241658 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-tv5ck","openshift-marketplace/redhat-operators-nbmgc","openshift-marketplace/marketplace-operator-79b997595-5c4b8","openshift-marketplace/community-operators-jrmvv","openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/certified-operators-kwtsp"] Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.242129 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.242351 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fltjl"] Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.243360 4822 scope.go:117] "RemoveContainer" containerID="6a1242f206d70afa6aa1885bc2c86d0471696fa21be267681a0f08dc69e4d574" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.250069 4822 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.282989 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.383460 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=21.38343789 podStartE2EDuration="21.38343789s" podCreationTimestamp="2025-12-01 06:54:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:55:19.381752679 +0000 UTC m=+274.702560365" watchObservedRunningTime="2025-12-01 06:55:19.38343789 +0000 UTC m=+274.704245576" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.520669 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.529791 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.604536 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.622958 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.660657 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.675506 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.719713 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.728706 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.754083 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.759178 4822 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.825365 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.845398 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.846715 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.856620 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.857753 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 01 06:55:19 crc kubenswrapper[4822]: I1201 06:55:19.978955 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.064851 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.082613 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.097198 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.184046 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.191436 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.251343 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fltjl_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74/marketplace-operator/1.log" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.251469 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" event={"ID":"fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74","Type":"ContainerStarted","Data":"e7faee52f0f42f3fc0efe18446c29550c936ff0f7f6ad341ea162a3df3116297"} Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.273165 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" podStartSLOduration=43.273133032 podStartE2EDuration="43.273133032s" podCreationTimestamp="2025-12-01 06:54:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:54:58.255322984 +0000 UTC m=+253.576130670" watchObservedRunningTime="2025-12-01 06:55:20.273133032 +0000 UTC m=+275.593940738" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.310261 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.334727 4822 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.348865 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.412718 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.462632 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.498418 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.501539 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.526979 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.534197 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.618692 4822 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.619136 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://32877cb00d3b15443f02d477fd1f16573b2bf61aa46e032d97593140b852ca4f" gracePeriod=5 Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.740180 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.763259 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.821867 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.848190 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.867384 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.904117 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.946047 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.958273 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" 
path="/var/lib/kubelet/pods/5485f8d6-493f-4b42-88a6-363043c13a90/volumes" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.958850 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" path="/var/lib/kubelet/pods/a693d2fd-8db7-4699-ad03-e70175ead53c/volumes" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.959500 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" path="/var/lib/kubelet/pods/c43bed42-ab14-48f1-a79d-dca61de1b6f1/volumes" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.960666 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" path="/var/lib/kubelet/pods/d9639beb-e6eb-42eb-9937-1b89a6be6100/volumes" Dec 01 06:55:20 crc kubenswrapper[4822]: I1201 06:55:20.961280 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" path="/var/lib/kubelet/pods/ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd/volumes" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.006274 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.056377 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.100196 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.125048 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.125102 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.309677 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.317492 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.480756 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.500877 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.517986 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.579333 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.614405 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.624961 4822 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-console-operator"/"console-operator-config" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.704353 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.776489 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.781777 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.856598 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.878252 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.897370 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 01 06:55:21 crc kubenswrapper[4822]: I1201 06:55:21.909896 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 01 06:55:22 crc kubenswrapper[4822]: I1201 06:55:22.004670 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 01 06:55:22 crc kubenswrapper[4822]: I1201 06:55:22.109650 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 01 06:55:22 crc kubenswrapper[4822]: I1201 06:55:22.117995 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 01 06:55:22 crc kubenswrapper[4822]: I1201 06:55:22.454217 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 01 06:55:22 crc kubenswrapper[4822]: I1201 06:55:22.553340 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 01 06:55:22 crc kubenswrapper[4822]: I1201 06:55:22.632236 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 01 06:55:22 crc kubenswrapper[4822]: I1201 06:55:22.632484 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 01 06:55:22 crc kubenswrapper[4822]: I1201 06:55:22.715019 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 01 06:55:22 crc kubenswrapper[4822]: I1201 06:55:22.762687 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 06:55:22 crc kubenswrapper[4822]: I1201 06:55:22.966014 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 01 06:55:23 crc kubenswrapper[4822]: I1201 06:55:23.019174 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 01 06:55:23 crc kubenswrapper[4822]: I1201 06:55:23.020339 4822 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 01 06:55:23 crc kubenswrapper[4822]: I1201 06:55:23.197131 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 06:55:23 crc kubenswrapper[4822]: I1201 06:55:23.237418 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 01 06:55:23 crc kubenswrapper[4822]: I1201 06:55:23.306344 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 01 06:55:23 crc kubenswrapper[4822]: I1201 06:55:23.330861 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 01 06:55:23 crc kubenswrapper[4822]: I1201 06:55:23.364278 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 01 06:55:23 crc kubenswrapper[4822]: I1201 06:55:23.738493 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 01 06:55:23 crc kubenswrapper[4822]: I1201 06:55:23.777790 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 01 06:55:23 crc kubenswrapper[4822]: I1201 06:55:23.873825 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 01 06:55:23 crc kubenswrapper[4822]: I1201 06:55:23.939577 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 01 06:55:23 crc kubenswrapper[4822]: I1201 06:55:23.969994 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 01 06:55:24 crc kubenswrapper[4822]: I1201 06:55:24.047792 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 01 06:55:24 crc kubenswrapper[4822]: I1201 06:55:24.049888 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 01 06:55:24 crc kubenswrapper[4822]: I1201 06:55:24.670679 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 01 06:55:24 crc kubenswrapper[4822]: I1201 06:55:24.822417 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 01 06:55:24 crc kubenswrapper[4822]: I1201 06:55:24.853103 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 01 06:55:25 crc kubenswrapper[4822]: I1201 06:55:25.396106 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 01 06:55:25 crc kubenswrapper[4822]: I1201 06:55:25.408358 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.058910 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.204371 4822 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.204480 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.293595 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.293667 4822 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="32877cb00d3b15443f02d477fd1f16573b2bf61aa46e032d97593140b852ca4f" exitCode=137 Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.293726 4822 scope.go:117] "RemoveContainer" containerID="32877cb00d3b15443f02d477fd1f16573b2bf61aa46e032d97593140b852ca4f" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.293783 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.314729 4822 scope.go:117] "RemoveContainer" containerID="32877cb00d3b15443f02d477fd1f16573b2bf61aa46e032d97593140b852ca4f" Dec 01 06:55:26 crc kubenswrapper[4822]: E1201 06:55:26.316450 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32877cb00d3b15443f02d477fd1f16573b2bf61aa46e032d97593140b852ca4f\": container with ID starting with 32877cb00d3b15443f02d477fd1f16573b2bf61aa46e032d97593140b852ca4f not found: ID does not exist" containerID="32877cb00d3b15443f02d477fd1f16573b2bf61aa46e032d97593140b852ca4f" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.316495 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32877cb00d3b15443f02d477fd1f16573b2bf61aa46e032d97593140b852ca4f"} err="failed to get container status \"32877cb00d3b15443f02d477fd1f16573b2bf61aa46e032d97593140b852ca4f\": rpc error: code = NotFound desc = could not find container \"32877cb00d3b15443f02d477fd1f16573b2bf61aa46e032d97593140b852ca4f\": container with ID starting with 32877cb00d3b15443f02d477fd1f16573b2bf61aa46e032d97593140b852ca4f not found: ID does not exist" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.357080 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.357239 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.357255 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.357293 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.357310 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.357327 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.357386 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.357411 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.357705 4822 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.357722 4822 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.357735 4822 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.357763 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.367074 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.458967 4822 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.459037 4822 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:26 crc kubenswrapper[4822]: I1201 06:55:26.961403 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 01 06:55:27 crc kubenswrapper[4822]: I1201 06:55:27.617268 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:55:27 crc kubenswrapper[4822]: I1201 06:55:27.627571 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-fltjl" Dec 01 06:55:27 crc kubenswrapper[4822]: I1201 06:55:27.714977 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.232172 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-nmx5d"] Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.233127 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" podUID="4570adbe-e705-4cbe-aecb-b27a3b930048" containerName="controller-manager" containerID="cri-o://d186d0335b30ae51960013dc7d24fc79a2bc3922a7341ddf5c0dde85280e39ed" gracePeriod=30 Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.366243 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j"] Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.373498 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" podUID="218e0dbf-c5f6-405f-a22f-31b6f0f25d60" containerName="route-controller-manager" containerID="cri-o://7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5" gracePeriod=30 Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.421689 4822 generic.go:334] "Generic (PLEG): container finished" podID="4570adbe-e705-4cbe-aecb-b27a3b930048" containerID="d186d0335b30ae51960013dc7d24fc79a2bc3922a7341ddf5c0dde85280e39ed" exitCode=0 Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.421747 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" event={"ID":"4570adbe-e705-4cbe-aecb-b27a3b930048","Type":"ContainerDied","Data":"d186d0335b30ae51960013dc7d24fc79a2bc3922a7341ddf5c0dde85280e39ed"} Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.684264 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.742605 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.825449 4822 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.869059 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-proxy-ca-bundles\") pod \"4570adbe-e705-4cbe-aecb-b27a3b930048\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.869118 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjtvr\" (UniqueName: \"kubernetes.io/projected/4570adbe-e705-4cbe-aecb-b27a3b930048-kube-api-access-gjtvr\") pod \"4570adbe-e705-4cbe-aecb-b27a3b930048\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.869166 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-client-ca\") pod \"4570adbe-e705-4cbe-aecb-b27a3b930048\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.869219 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9fln\" (UniqueName: \"kubernetes.io/projected/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-kube-api-access-n9fln\") pod \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.869256 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-serving-cert\") pod \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.869278 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4570adbe-e705-4cbe-aecb-b27a3b930048-serving-cert\") pod \"4570adbe-e705-4cbe-aecb-b27a3b930048\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.869316 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-config\") pod \"4570adbe-e705-4cbe-aecb-b27a3b930048\" (UID: \"4570adbe-e705-4cbe-aecb-b27a3b930048\") " Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.869394 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-config\") pod \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.869429 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-client-ca\") pod \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\" (UID: \"218e0dbf-c5f6-405f-a22f-31b6f0f25d60\") " Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.870147 4822 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-config" (OuterVolumeSpecName: "config") pod "218e0dbf-c5f6-405f-a22f-31b6f0f25d60" (UID: "218e0dbf-c5f6-405f-a22f-31b6f0f25d60"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.870197 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-config" (OuterVolumeSpecName: "config") pod "4570adbe-e705-4cbe-aecb-b27a3b930048" (UID: "4570adbe-e705-4cbe-aecb-b27a3b930048"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.870220 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-client-ca" (OuterVolumeSpecName: "client-ca") pod "218e0dbf-c5f6-405f-a22f-31b6f0f25d60" (UID: "218e0dbf-c5f6-405f-a22f-31b6f0f25d60"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.870255 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-client-ca" (OuterVolumeSpecName: "client-ca") pod "4570adbe-e705-4cbe-aecb-b27a3b930048" (UID: "4570adbe-e705-4cbe-aecb-b27a3b930048"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.870880 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "4570adbe-e705-4cbe-aecb-b27a3b930048" (UID: "4570adbe-e705-4cbe-aecb-b27a3b930048"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.876372 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "218e0dbf-c5f6-405f-a22f-31b6f0f25d60" (UID: "218e0dbf-c5f6-405f-a22f-31b6f0f25d60"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.876614 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-kube-api-access-n9fln" (OuterVolumeSpecName: "kube-api-access-n9fln") pod "218e0dbf-c5f6-405f-a22f-31b6f0f25d60" (UID: "218e0dbf-c5f6-405f-a22f-31b6f0f25d60"). InnerVolumeSpecName "kube-api-access-n9fln". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.876789 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4570adbe-e705-4cbe-aecb-b27a3b930048-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4570adbe-e705-4cbe-aecb-b27a3b930048" (UID: "4570adbe-e705-4cbe-aecb-b27a3b930048"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.877463 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4570adbe-e705-4cbe-aecb-b27a3b930048-kube-api-access-gjtvr" (OuterVolumeSpecName: "kube-api-access-gjtvr") pod "4570adbe-e705-4cbe-aecb-b27a3b930048" (UID: "4570adbe-e705-4cbe-aecb-b27a3b930048"). InnerVolumeSpecName "kube-api-access-gjtvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.970397 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.970433 4822 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.970450 4822 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.970465 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjtvr\" (UniqueName: \"kubernetes.io/projected/4570adbe-e705-4cbe-aecb-b27a3b930048-kube-api-access-gjtvr\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.970479 4822 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.970491 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9fln\" (UniqueName: \"kubernetes.io/projected/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-kube-api-access-n9fln\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.970501 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4570adbe-e705-4cbe-aecb-b27a3b930048-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.970512 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/218e0dbf-c5f6-405f-a22f-31b6f0f25d60-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:44 crc kubenswrapper[4822]: I1201 06:55:44.970524 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4570adbe-e705-4cbe-aecb-b27a3b930048-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.429995 4822 generic.go:334] "Generic (PLEG): container finished" podID="218e0dbf-c5f6-405f-a22f-31b6f0f25d60" containerID="7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5" exitCode=0 Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.430242 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.430242 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" event={"ID":"218e0dbf-c5f6-405f-a22f-31b6f0f25d60","Type":"ContainerDied","Data":"7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5"} Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.430514 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j" event={"ID":"218e0dbf-c5f6-405f-a22f-31b6f0f25d60","Type":"ContainerDied","Data":"e99cf940b27126385d82ca118adcda6d6d2d8c00b3e91f049142f9358b3bd0bb"} Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.430595 4822 scope.go:117] "RemoveContainer" containerID="7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5" Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.438260 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" event={"ID":"4570adbe-e705-4cbe-aecb-b27a3b930048","Type":"ContainerDied","Data":"f55a01453fda6e34ee70ea37d7806295ec2deac5ca7697dff7f91cc5c9363358"} Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.438363 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-nmx5d" Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.458958 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j"] Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.467001 4822 scope.go:117] "RemoveContainer" containerID="7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5" Dec 01 06:55:45 crc kubenswrapper[4822]: E1201 06:55:45.468137 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5\": container with ID starting with 7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5 not found: ID does not exist" containerID="7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5" Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.468203 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5"} err="failed to get container status \"7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5\": rpc error: code = NotFound desc = could not find container \"7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5\": container with ID starting with 7be048731b2477d1012cd3f8e92811eb6b88b40897ddd0521f5633b859dafcf5 not found: ID does not exist" Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.468245 4822 scope.go:117] "RemoveContainer" containerID="d186d0335b30ae51960013dc7d24fc79a2bc3922a7341ddf5c0dde85280e39ed" Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.468394 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9sr2j"] Dec 01 06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.474543 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-nmx5d"] Dec 01 
06:55:45 crc kubenswrapper[4822]: I1201 06:55:45.479613 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-nmx5d"] Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500309 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf"] Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500690 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" containerName="extract-content" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500711 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" containerName="extract-content" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500720 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" containerName="extract-content" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500726 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" containerName="extract-content" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500738 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" containerName="extract-utilities" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500746 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" containerName="extract-utilities" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500752 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" containerName="registry-server" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500758 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" containerName="registry-server" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500770 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" containerName="installer" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500776 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" containerName="installer" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500786 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" containerName="extract-content" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500793 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" containerName="extract-content" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500802 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4570adbe-e705-4cbe-aecb-b27a3b930048" containerName="controller-manager" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500811 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4570adbe-e705-4cbe-aecb-b27a3b930048" containerName="controller-manager" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500821 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" containerName="extract-utilities" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500828 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" 
containerName="extract-utilities" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500837 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500843 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500851 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" containerName="extract-content" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500857 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" containerName="extract-content" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500865 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" containerName="registry-server" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500871 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" containerName="registry-server" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500878 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" containerName="extract-utilities" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500884 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" containerName="extract-utilities" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500894 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="218e0dbf-c5f6-405f-a22f-31b6f0f25d60" containerName="route-controller-manager" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500909 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="218e0dbf-c5f6-405f-a22f-31b6f0f25d60" containerName="route-controller-manager" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500924 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" containerName="marketplace-operator" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500930 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" containerName="marketplace-operator" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500940 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" containerName="extract-utilities" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500946 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" containerName="extract-utilities" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500956 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" containerName="registry-server" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500962 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" containerName="registry-server" Dec 01 06:55:46 crc kubenswrapper[4822]: E1201 06:55:46.500969 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" containerName="registry-server" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.500975 4822 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" containerName="registry-server" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.501080 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="218e0dbf-c5f6-405f-a22f-31b6f0f25d60" containerName="route-controller-manager" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.501090 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fec124f-3cc9-4991-85b7-3a73de687907" containerName="installer" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.501108 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="c43bed42-ab14-48f1-a79d-dca61de1b6f1" containerName="registry-server" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.501117 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4570adbe-e705-4cbe-aecb-b27a3b930048" containerName="controller-manager" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.501130 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a693d2fd-8db7-4699-ad03-e70175ead53c" containerName="registry-server" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.501140 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.501167 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="5485f8d6-493f-4b42-88a6-363043c13a90" containerName="marketplace-operator" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.501174 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee211f87-9d8b-4a63-9d2e-7f2b7aa54ecd" containerName="registry-server" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.501184 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9639beb-e6eb-42eb-9937-1b89a6be6100" containerName="registry-server" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.503082 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.518294 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.518914 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.519024 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.519259 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.518940 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.519623 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.554934 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-56888b7f5d-hrctp"] Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.557063 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.562962 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-56888b7f5d-hrctp"] Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.563191 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.563682 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.573136 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf"] Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.564046 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.564115 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.568167 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.575626 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.576704 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.595319 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/255925e6-ea26-472b-b0a7-763ed51602db-serving-cert\") pod \"route-controller-manager-6cb99949b-psbdf\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.595417 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/255925e6-ea26-472b-b0a7-763ed51602db-client-ca\") pod \"route-controller-manager-6cb99949b-psbdf\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.595464 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp57r\" (UniqueName: \"kubernetes.io/projected/255925e6-ea26-472b-b0a7-763ed51602db-kube-api-access-hp57r\") pod \"route-controller-manager-6cb99949b-psbdf\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.595567 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/255925e6-ea26-472b-b0a7-763ed51602db-config\") pod \"route-controller-manager-6cb99949b-psbdf\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.697067 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/255925e6-ea26-472b-b0a7-763ed51602db-client-ca\") pod \"route-controller-manager-6cb99949b-psbdf\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.697150 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp57r\" (UniqueName: \"kubernetes.io/projected/255925e6-ea26-472b-b0a7-763ed51602db-kube-api-access-hp57r\") pod \"route-controller-manager-6cb99949b-psbdf\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.697205 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/255925e6-ea26-472b-b0a7-763ed51602db-config\") pod \"route-controller-manager-6cb99949b-psbdf\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.697257 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-proxy-ca-bundles\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.697290 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-serving-cert\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.697337 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-client-ca\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.697370 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/255925e6-ea26-472b-b0a7-763ed51602db-serving-cert\") pod \"route-controller-manager-6cb99949b-psbdf\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.697396 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8xzs\" (UniqueName: \"kubernetes.io/projected/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-kube-api-access-q8xzs\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.697425 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-config\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.698909 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/255925e6-ea26-472b-b0a7-763ed51602db-client-ca\") pod \"route-controller-manager-6cb99949b-psbdf\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.699334 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/255925e6-ea26-472b-b0a7-763ed51602db-config\") pod \"route-controller-manager-6cb99949b-psbdf\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.705400 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/255925e6-ea26-472b-b0a7-763ed51602db-serving-cert\") pod \"route-controller-manager-6cb99949b-psbdf\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.718448 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp57r\" (UniqueName: \"kubernetes.io/projected/255925e6-ea26-472b-b0a7-763ed51602db-kube-api-access-hp57r\") pod 
\"route-controller-manager-6cb99949b-psbdf\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.799048 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-client-ca\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.799124 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8xzs\" (UniqueName: \"kubernetes.io/projected/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-kube-api-access-q8xzs\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.799188 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-config\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.799254 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-proxy-ca-bundles\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.799280 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-serving-cert\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.800309 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-client-ca\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.801322 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-config\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.801470 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-proxy-ca-bundles\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.810111 
4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-serving-cert\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.817524 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8xzs\" (UniqueName: \"kubernetes.io/projected/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-kube-api-access-q8xzs\") pod \"controller-manager-56888b7f5d-hrctp\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.867639 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.881987 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.972057 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="218e0dbf-c5f6-405f-a22f-31b6f0f25d60" path="/var/lib/kubelet/pods/218e0dbf-c5f6-405f-a22f-31b6f0f25d60/volumes" Dec 01 06:55:46 crc kubenswrapper[4822]: I1201 06:55:46.980255 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4570adbe-e705-4cbe-aecb-b27a3b930048" path="/var/lib/kubelet/pods/4570adbe-e705-4cbe-aecb-b27a3b930048/volumes" Dec 01 06:55:47 crc kubenswrapper[4822]: I1201 06:55:47.106342 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-56888b7f5d-hrctp"] Dec 01 06:55:47 crc kubenswrapper[4822]: W1201 06:55:47.115578 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b3bcdb4_4b0c_48c6_9d86_3556a062717b.slice/crio-f3d66b48b370c59bad4f0dff262be78904745db6e2f4bd786112fe68e1db4eac WatchSource:0}: Error finding container f3d66b48b370c59bad4f0dff262be78904745db6e2f4bd786112fe68e1db4eac: Status 404 returned error can't find the container with id f3d66b48b370c59bad4f0dff262be78904745db6e2f4bd786112fe68e1db4eac Dec 01 06:55:47 crc kubenswrapper[4822]: I1201 06:55:47.173965 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf"] Dec 01 06:55:47 crc kubenswrapper[4822]: W1201 06:55:47.183128 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod255925e6_ea26_472b_b0a7_763ed51602db.slice/crio-433cc0290a913219401ee80f8a0e4a517a8aa1755fd08b3ed7fbac5c3172bcf3 WatchSource:0}: Error finding container 433cc0290a913219401ee80f8a0e4a517a8aa1755fd08b3ed7fbac5c3172bcf3: Status 404 returned error can't find the container with id 433cc0290a913219401ee80f8a0e4a517a8aa1755fd08b3ed7fbac5c3172bcf3 Dec 01 06:55:47 crc kubenswrapper[4822]: I1201 06:55:47.454755 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" event={"ID":"255925e6-ea26-472b-b0a7-763ed51602db","Type":"ContainerStarted","Data":"da1aae2f883153b02521329fe14df3f38881c0c72a7d46b474979f3a5be0b924"} Dec 01 06:55:47 crc 
kubenswrapper[4822]: I1201 06:55:47.454837 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:47 crc kubenswrapper[4822]: I1201 06:55:47.454857 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" event={"ID":"255925e6-ea26-472b-b0a7-763ed51602db","Type":"ContainerStarted","Data":"433cc0290a913219401ee80f8a0e4a517a8aa1755fd08b3ed7fbac5c3172bcf3"} Dec 01 06:55:47 crc kubenswrapper[4822]: I1201 06:55:47.456475 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" event={"ID":"3b3bcdb4-4b0c-48c6-9d86-3556a062717b","Type":"ContainerStarted","Data":"a9c8e422abb47446e76e8a198fba3dd836a5b8c2d75fa922fc1e0a385a0b6324"} Dec 01 06:55:47 crc kubenswrapper[4822]: I1201 06:55:47.456520 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" event={"ID":"3b3bcdb4-4b0c-48c6-9d86-3556a062717b","Type":"ContainerStarted","Data":"f3d66b48b370c59bad4f0dff262be78904745db6e2f4bd786112fe68e1db4eac"} Dec 01 06:55:47 crc kubenswrapper[4822]: I1201 06:55:47.456794 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:47 crc kubenswrapper[4822]: I1201 06:55:47.484158 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" podStartSLOduration=3.484139439 podStartE2EDuration="3.484139439s" podCreationTimestamp="2025-12-01 06:55:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:55:47.481832619 +0000 UTC m=+302.802640315" watchObservedRunningTime="2025-12-01 06:55:47.484139439 +0000 UTC m=+302.804947125" Dec 01 06:55:47 crc kubenswrapper[4822]: I1201 06:55:47.495721 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:55:47 crc kubenswrapper[4822]: I1201 06:55:47.514503 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" podStartSLOduration=3.5144816519999997 podStartE2EDuration="3.514481652s" podCreationTimestamp="2025-12-01 06:55:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:55:47.510493731 +0000 UTC m=+302.831301437" watchObservedRunningTime="2025-12-01 06:55:47.514481652 +0000 UTC m=+302.835289348" Dec 01 06:55:47 crc kubenswrapper[4822]: I1201 06:55:47.628504 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:55:48 crc kubenswrapper[4822]: E1201 06:55:48.324359 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218e0dbf_c5f6_405f_a22f_31b6f0f25d60.slice\": RecentStats: unable to find data in memory cache], 
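The pod_startup_latency_tracker.go:104 entries above carry the startup SLO measurement for each new pod (~3.5s here, with zero-valued pull timestamps since no image pull happened). A minimal sketch of extracting those numbers from a log like this one, assuming the entries are fed in on stdin; the field names (pod=, podStartSLOduration=) are copied from the lines above, everything else is illustrative:

// slo.go - extract pod startup SLO durations from kubelet log text on stdin.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches the pod_startup_latency_tracker entries seen above, e.g.
//   "Observed pod startup duration" pod="ns/name" podStartSLOduration=3.484139439
var sloRe = regexp.MustCompile(`"Observed pod startup duration" pod="([^"]+)" podStartSLOduration=([0-9.]+)`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // journal entries can be long
	for sc.Scan() {
		if m := sloRe.FindStringSubmatch(sc.Text()); m != nil {
			fmt.Printf("%s\t%ss\n", m[1], m[2])
		}
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, "scan:", err)
	}
}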
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218e0dbf_c5f6_405f_a22f_31b6f0f25d60.slice/crio-e99cf940b27126385d82ca118adcda6d6d2d8c00b3e91f049142f9358b3bd0bb\": RecentStats: unable to find data in memory cache]" Dec 01 06:55:58 crc kubenswrapper[4822]: E1201 06:55:58.486044 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218e0dbf_c5f6_405f_a22f_31b6f0f25d60.slice/crio-e99cf940b27126385d82ca118adcda6d6d2d8c00b3e91f049142f9358b3bd0bb\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218e0dbf_c5f6_405f_a22f_31b6f0f25d60.slice\": RecentStats: unable to find data in memory cache]" Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.228596 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-56888b7f5d-hrctp"] Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.229877 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" podUID="3b3bcdb4-4b0c-48c6-9d86-3556a062717b" containerName="controller-manager" containerID="cri-o://a9c8e422abb47446e76e8a198fba3dd836a5b8c2d75fa922fc1e0a385a0b6324" gracePeriod=30 Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.237540 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf"] Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.242110 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" podUID="255925e6-ea26-472b-b0a7-763ed51602db" containerName="route-controller-manager" containerID="cri-o://da1aae2f883153b02521329fe14df3f38881c0c72a7d46b474979f3a5be0b924" gracePeriod=30 Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.578399 4822 generic.go:334] "Generic (PLEG): container finished" podID="255925e6-ea26-472b-b0a7-763ed51602db" containerID="da1aae2f883153b02521329fe14df3f38881c0c72a7d46b474979f3a5be0b924" exitCode=0 Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.578506 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" event={"ID":"255925e6-ea26-472b-b0a7-763ed51602db","Type":"ContainerDied","Data":"da1aae2f883153b02521329fe14df3f38881c0c72a7d46b474979f3a5be0b924"} Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.581859 4822 generic.go:334] "Generic (PLEG): container finished" podID="3b3bcdb4-4b0c-48c6-9d86-3556a062717b" containerID="a9c8e422abb47446e76e8a198fba3dd836a5b8c2d75fa922fc1e0a385a0b6324" exitCode=0 Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.581891 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" event={"ID":"3b3bcdb4-4b0c-48c6-9d86-3556a062717b","Type":"ContainerDied","Data":"a9c8e422abb47446e76e8a198fba3dd836a5b8c2d75fa922fc1e0a385a0b6324"} Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.827007 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.929192 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.963543 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/255925e6-ea26-472b-b0a7-763ed51602db-serving-cert\") pod \"255925e6-ea26-472b-b0a7-763ed51602db\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.963654 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/255925e6-ea26-472b-b0a7-763ed51602db-client-ca\") pod \"255925e6-ea26-472b-b0a7-763ed51602db\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.963706 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hp57r\" (UniqueName: \"kubernetes.io/projected/255925e6-ea26-472b-b0a7-763ed51602db-kube-api-access-hp57r\") pod \"255925e6-ea26-472b-b0a7-763ed51602db\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.963734 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/255925e6-ea26-472b-b0a7-763ed51602db-config\") pod \"255925e6-ea26-472b-b0a7-763ed51602db\" (UID: \"255925e6-ea26-472b-b0a7-763ed51602db\") " Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.965129 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/255925e6-ea26-472b-b0a7-763ed51602db-config" (OuterVolumeSpecName: "config") pod "255925e6-ea26-472b-b0a7-763ed51602db" (UID: "255925e6-ea26-472b-b0a7-763ed51602db"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.965244 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/255925e6-ea26-472b-b0a7-763ed51602db-client-ca" (OuterVolumeSpecName: "client-ca") pod "255925e6-ea26-472b-b0a7-763ed51602db" (UID: "255925e6-ea26-472b-b0a7-763ed51602db"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.971698 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/255925e6-ea26-472b-b0a7-763ed51602db-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "255925e6-ea26-472b-b0a7-763ed51602db" (UID: "255925e6-ea26-472b-b0a7-763ed51602db"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:56:04 crc kubenswrapper[4822]: I1201 06:56:04.971765 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/255925e6-ea26-472b-b0a7-763ed51602db-kube-api-access-hp57r" (OuterVolumeSpecName: "kube-api-access-hp57r") pod "255925e6-ea26-472b-b0a7-763ed51602db" (UID: "255925e6-ea26-472b-b0a7-763ed51602db"). InnerVolumeSpecName "kube-api-access-hp57r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.065321 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-proxy-ca-bundles\") pod \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.065392 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-client-ca\") pod \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.065436 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-config\") pod \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.065499 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-serving-cert\") pod \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.065538 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8xzs\" (UniqueName: \"kubernetes.io/projected/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-kube-api-access-q8xzs\") pod \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\" (UID: \"3b3bcdb4-4b0c-48c6-9d86-3556a062717b\") " Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.065960 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hp57r\" (UniqueName: \"kubernetes.io/projected/255925e6-ea26-472b-b0a7-763ed51602db-kube-api-access-hp57r\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.065979 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/255925e6-ea26-472b-b0a7-763ed51602db-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.065995 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/255925e6-ea26-472b-b0a7-763ed51602db-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.066007 4822 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/255925e6-ea26-472b-b0a7-763ed51602db-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.066840 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "3b3bcdb4-4b0c-48c6-9d86-3556a062717b" (UID: "3b3bcdb4-4b0c-48c6-9d86-3556a062717b"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.066913 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-client-ca" (OuterVolumeSpecName: "client-ca") pod "3b3bcdb4-4b0c-48c6-9d86-3556a062717b" (UID: "3b3bcdb4-4b0c-48c6-9d86-3556a062717b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.067128 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-config" (OuterVolumeSpecName: "config") pod "3b3bcdb4-4b0c-48c6-9d86-3556a062717b" (UID: "3b3bcdb4-4b0c-48c6-9d86-3556a062717b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.068928 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-kube-api-access-q8xzs" (OuterVolumeSpecName: "kube-api-access-q8xzs") pod "3b3bcdb4-4b0c-48c6-9d86-3556a062717b" (UID: "3b3bcdb4-4b0c-48c6-9d86-3556a062717b"). InnerVolumeSpecName "kube-api-access-q8xzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.071517 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3b3bcdb4-4b0c-48c6-9d86-3556a062717b" (UID: "3b3bcdb4-4b0c-48c6-9d86-3556a062717b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.167867 4822 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.167909 4822 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.167920 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.167932 4822 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.167948 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8xzs\" (UniqueName: \"kubernetes.io/projected/3b3bcdb4-4b0c-48c6-9d86-3556a062717b-kube-api-access-q8xzs\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.520259 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq"] Dec 01 06:56:05 crc kubenswrapper[4822]: E1201 06:56:05.520919 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b3bcdb4-4b0c-48c6-9d86-3556a062717b" containerName="controller-manager" Dec 01 06:56:05 crc 
kubenswrapper[4822]: I1201 06:56:05.520943 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b3bcdb4-4b0c-48c6-9d86-3556a062717b" containerName="controller-manager" Dec 01 06:56:05 crc kubenswrapper[4822]: E1201 06:56:05.520971 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="255925e6-ea26-472b-b0a7-763ed51602db" containerName="route-controller-manager" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.520982 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="255925e6-ea26-472b-b0a7-763ed51602db" containerName="route-controller-manager" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.522836 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b3bcdb4-4b0c-48c6-9d86-3556a062717b" containerName="controller-manager" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.522914 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="255925e6-ea26-472b-b0a7-763ed51602db" containerName="route-controller-manager" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.523705 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.529546 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-56c66d47dc-8mwlx"] Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.531676 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.539181 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq"] Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.544528 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-56c66d47dc-8mwlx"] Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.590824 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" event={"ID":"255925e6-ea26-472b-b0a7-763ed51602db","Type":"ContainerDied","Data":"433cc0290a913219401ee80f8a0e4a517a8aa1755fd08b3ed7fbac5c3172bcf3"} Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.590887 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.590914 4822 scope.go:117] "RemoveContainer" containerID="da1aae2f883153b02521329fe14df3f38881c0c72a7d46b474979f3a5be0b924" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.596221 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" event={"ID":"3b3bcdb4-4b0c-48c6-9d86-3556a062717b","Type":"ContainerDied","Data":"f3d66b48b370c59bad4f0dff262be78904745db6e2f4bd786112fe68e1db4eac"} Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.596365 4822 util.go:48] "No ready sandbox for pod can be found. 
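Interleaved with the teardown above, the kubelet admits the replacement pods (SyncLoop ADD for route-controller-manager-bfcc854dc-hbbqq and controller-manager-56c66d47dc-8mwlx) while the cpu/memory managers first drop stale per-container state for the old UIDs. A small sketch that reconstructs this rollout ordering by extracting the SyncLoop verbs; the pods=[...] format is as printed by kubelet.go above, the rest is illustrative:

// syncloop.go - print the ordered stream of SyncLoop ADD/UPDATE/DELETE/REMOVE
// events so a rolling replacement like the one above is easy to follow.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// e.g. I1201 06:56:05.520259 ... kubelet.go:2421] "SyncLoop ADD" source="api" pods=["ns/name"]
var syncRe = regexp.MustCompile(`I\d{4} (\d{2}:\d{2}:\d{2}\.\d{6}) .*"SyncLoop (ADD|UPDATE|DELETE|REMOVE)" source="api" pods=\[([^\]]+)\]`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		if m := syncRe.FindStringSubmatch(sc.Text()); m != nil {
			fmt.Printf("%s %-6s %s\n", m[1], m[2], m[3])
		}
	}
}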
Need to start a new one" pod="openshift-controller-manager/controller-manager-56888b7f5d-hrctp" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.620029 4822 scope.go:117] "RemoveContainer" containerID="a9c8e422abb47446e76e8a198fba3dd836a5b8c2d75fa922fc1e0a385a0b6324" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.621770 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf"] Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.625154 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cb99949b-psbdf"] Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.640175 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-56888b7f5d-hrctp"] Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.645948 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-56888b7f5d-hrctp"] Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.674533 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nmpj\" (UniqueName: \"kubernetes.io/projected/60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd-kube-api-access-8nmpj\") pod \"route-controller-manager-bfcc854dc-hbbqq\" (UID: \"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd\") " pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.674618 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5eba4855-d362-49f4-a636-ba315e2aa3a6-config\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.674650 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd-serving-cert\") pod \"route-controller-manager-bfcc854dc-hbbqq\" (UID: \"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd\") " pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.674685 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5eba4855-d362-49f4-a636-ba315e2aa3a6-client-ca\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.674714 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd-client-ca\") pod \"route-controller-manager-bfcc854dc-hbbqq\" (UID: \"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd\") " pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.674772 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd-config\") pod 
\"route-controller-manager-bfcc854dc-hbbqq\" (UID: \"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd\") " pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.674947 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5eba4855-d362-49f4-a636-ba315e2aa3a6-serving-cert\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.675064 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjvtq\" (UniqueName: \"kubernetes.io/projected/5eba4855-d362-49f4-a636-ba315e2aa3a6-kube-api-access-fjvtq\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.675138 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5eba4855-d362-49f4-a636-ba315e2aa3a6-proxy-ca-bundles\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.776674 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nmpj\" (UniqueName: \"kubernetes.io/projected/60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd-kube-api-access-8nmpj\") pod \"route-controller-manager-bfcc854dc-hbbqq\" (UID: \"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd\") " pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.776726 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5eba4855-d362-49f4-a636-ba315e2aa3a6-config\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.776770 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd-serving-cert\") pod \"route-controller-manager-bfcc854dc-hbbqq\" (UID: \"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd\") " pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.776841 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5eba4855-d362-49f4-a636-ba315e2aa3a6-client-ca\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.776877 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd-client-ca\") pod \"route-controller-manager-bfcc854dc-hbbqq\" (UID: 
\"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd\") " pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.776903 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd-config\") pod \"route-controller-manager-bfcc854dc-hbbqq\" (UID: \"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd\") " pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.776933 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5eba4855-d362-49f4-a636-ba315e2aa3a6-serving-cert\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.776965 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjvtq\" (UniqueName: \"kubernetes.io/projected/5eba4855-d362-49f4-a636-ba315e2aa3a6-kube-api-access-fjvtq\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.776999 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5eba4855-d362-49f4-a636-ba315e2aa3a6-proxy-ca-bundles\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.778379 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5eba4855-d362-49f4-a636-ba315e2aa3a6-client-ca\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.778587 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5eba4855-d362-49f4-a636-ba315e2aa3a6-proxy-ca-bundles\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.778617 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5eba4855-d362-49f4-a636-ba315e2aa3a6-config\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.778725 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd-client-ca\") pod \"route-controller-manager-bfcc854dc-hbbqq\" (UID: \"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd\") " pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.781491 4822 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5eba4855-d362-49f4-a636-ba315e2aa3a6-serving-cert\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.786904 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd-serving-cert\") pod \"route-controller-manager-bfcc854dc-hbbqq\" (UID: \"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd\") " pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.786920 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd-config\") pod \"route-controller-manager-bfcc854dc-hbbqq\" (UID: \"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd\") " pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.795657 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjvtq\" (UniqueName: \"kubernetes.io/projected/5eba4855-d362-49f4-a636-ba315e2aa3a6-kube-api-access-fjvtq\") pod \"controller-manager-56c66d47dc-8mwlx\" (UID: \"5eba4855-d362-49f4-a636-ba315e2aa3a6\") " pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.797040 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nmpj\" (UniqueName: \"kubernetes.io/projected/60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd-kube-api-access-8nmpj\") pod \"route-controller-manager-bfcc854dc-hbbqq\" (UID: \"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd\") " pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.877218 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:05 crc kubenswrapper[4822]: I1201 06:56:05.888526 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" Dec 01 06:56:06 crc kubenswrapper[4822]: I1201 06:56:06.367720 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq"] Dec 01 06:56:06 crc kubenswrapper[4822]: W1201 06:56:06.376522 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60eaa5dd_9b8f_4b49_8f2b_1aece7111ecd.slice/crio-60fe23620fa6a61c8ee635f61a843daf7f0fcce0b950cc53a342c0d6c6a5edf3 WatchSource:0}: Error finding container 60fe23620fa6a61c8ee635f61a843daf7f0fcce0b950cc53a342c0d6c6a5edf3: Status 404 returned error can't find the container with id 60fe23620fa6a61c8ee635f61a843daf7f0fcce0b950cc53a342c0d6c6a5edf3 Dec 01 06:56:06 crc kubenswrapper[4822]: I1201 06:56:06.449302 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-56c66d47dc-8mwlx"] Dec 01 06:56:06 crc kubenswrapper[4822]: I1201 06:56:06.601500 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" event={"ID":"5eba4855-d362-49f4-a636-ba315e2aa3a6","Type":"ContainerStarted","Data":"76b9d680bdcdbec38fa6a3655a18d3d365184a24ef506093d2081b1b7751be89"} Dec 01 06:56:06 crc kubenswrapper[4822]: I1201 06:56:06.605106 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" event={"ID":"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd","Type":"ContainerStarted","Data":"5ae0e766c9c2513cdd7019c0cc23d21e776c13180a1469b5fb5681d6ba07135f"} Dec 01 06:56:06 crc kubenswrapper[4822]: I1201 06:56:06.605134 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" event={"ID":"60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd","Type":"ContainerStarted","Data":"60fe23620fa6a61c8ee635f61a843daf7f0fcce0b950cc53a342c0d6c6a5edf3"} Dec 01 06:56:06 crc kubenswrapper[4822]: I1201 06:56:06.605426 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" Dec 01 06:56:06 crc kubenswrapper[4822]: I1201 06:56:06.608564 4822 patch_prober.go:28] interesting pod/route-controller-manager-bfcc854dc-hbbqq container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" start-of-body= Dec 01 06:56:06 crc kubenswrapper[4822]: I1201 06:56:06.608631 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" podUID="60eaa5dd-9b8f-4b49-8f2b-1aece7111ecd" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" Dec 01 06:56:06 crc kubenswrapper[4822]: I1201 06:56:06.626244 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" podStartSLOduration=2.626220552 podStartE2EDuration="2.626220552s" podCreationTimestamp="2025-12-01 06:56:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 
Dec 01 06:56:06 crc kubenswrapper[4822]: I1201 06:56:06.626244 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq" podStartSLOduration=2.626220552 podStartE2EDuration="2.626220552s" podCreationTimestamp="2025-12-01 06:56:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:56:06.625976574 +0000 UTC m=+321.946784270" watchObservedRunningTime="2025-12-01 06:56:06.626220552 +0000 UTC m=+321.947028238"
Dec 01 06:56:06 crc kubenswrapper[4822]: I1201 06:56:06.958658 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="255925e6-ea26-472b-b0a7-763ed51602db" path="/var/lib/kubelet/pods/255925e6-ea26-472b-b0a7-763ed51602db/volumes"
Dec 01 06:56:06 crc kubenswrapper[4822]: I1201 06:56:06.959627 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b3bcdb4-4b0c-48c6-9d86-3556a062717b" path="/var/lib/kubelet/pods/3b3bcdb4-4b0c-48c6-9d86-3556a062717b/volumes"
Dec 01 06:56:07 crc kubenswrapper[4822]: I1201 06:56:07.613136 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" event={"ID":"5eba4855-d362-49f4-a636-ba315e2aa3a6","Type":"ContainerStarted","Data":"009477bd58e669ee4f120c1dfb1212c004e78ee841765a45b711d8731df4ba99"}
Dec 01 06:56:07 crc kubenswrapper[4822]: I1201 06:56:07.619482 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-bfcc854dc-hbbqq"
Dec 01 06:56:07 crc kubenswrapper[4822]: I1201 06:56:07.658906 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx" podStartSLOduration=3.6588748840000003 podStartE2EDuration="3.658874884s" podCreationTimestamp="2025-12-01 06:56:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:56:07.640254597 +0000 UTC m=+322.961062283" watchObservedRunningTime="2025-12-01 06:56:07.658874884 +0000 UTC m=+322.979682570"
Dec 01 06:56:08 crc kubenswrapper[4822]: I1201 06:56:08.623429 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx"
Dec 01 06:56:08 crc kubenswrapper[4822]: I1201 06:56:08.635756 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-56c66d47dc-8mwlx"
Dec 01 06:56:08 crc kubenswrapper[4822]: E1201 06:56:08.641699 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218e0dbf_c5f6_405f_a22f_31b6f0f25d60.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218e0dbf_c5f6_405f_a22f_31b6f0f25d60.slice/crio-e99cf940b27126385d82ca118adcda6d6d2d8c00b3e91f049142f9358b3bd0bb\": RecentStats: unable to find data in memory cache]"
Dec 01 06:56:18 crc kubenswrapper[4822]: E1201 06:56:18.810510 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218e0dbf_c5f6_405f_a22f_31b6f0f25d60.slice/crio-e99cf940b27126385d82ca118adcda6d6d2d8c00b3e91f049142f9358b3bd0bb\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218e0dbf_c5f6_405f_a22f_31b6f0f25d60.slice\": RecentStats: unable to find data in memory cache]"
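The cadvisor "Partial failure" errors recur roughly every ten seconds and always name cgroups for pod 218e0dbf..., whose volumes were already cleaned up at 06:55:46: they are stale references to a deleted pod's slice, typically harmless noise until housekeeping catches up. A sketch that collapses that noise into per-cgroup counts; the path pattern (including the literal backslash-escaped quotes) is copied from the err= payloads above:

// cadvisor_noise.go - collapse repeated "RecentStats: unable to find data in
// memory cache" errors into per-cgroup counts.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"sort"
)

// The log text contains literal \" sequences, so the regex matches a
// backslash followed by a quote around each cgroup path.
var pathRe = regexp.MustCompile(`\\"(/kubepods\.slice/[^\\"]+)\\": RecentStats`)

func main() {
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		for _, m := range pathRe.FindAllStringSubmatch(sc.Text(), -1) {
			counts[m[1]]++
		}
	}
	keys := make([]string, 0, len(counts))
	for k := range counts {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		fmt.Printf("%4d %s\n", counts[k], k)
	}
}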
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218e0dbf_c5f6_405f_a22f_31b6f0f25d60.slice/crio-e99cf940b27126385d82ca118adcda6d6d2d8c00b3e91f049142f9358b3bd0bb\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218e0dbf_c5f6_405f_a22f_31b6f0f25d60.slice\": RecentStats: unable to find data in memory cache]" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.123950 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-b66rx"] Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.125539 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.148292 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-b66rx"] Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.157151 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b0163f9b-75db-4986-965d-28a34b294aa1-trusted-ca\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.157227 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b0163f9b-75db-4986-965d-28a34b294aa1-registry-tls\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.157452 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b0163f9b-75db-4986-965d-28a34b294aa1-registry-certificates\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.157657 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b0163f9b-75db-4986-965d-28a34b294aa1-bound-sa-token\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.157769 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.157898 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b0163f9b-75db-4986-965d-28a34b294aa1-ca-trust-extracted\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.157976 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c69j7\" (UniqueName: \"kubernetes.io/projected/b0163f9b-75db-4986-965d-28a34b294aa1-kube-api-access-c69j7\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.158018 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b0163f9b-75db-4986-965d-28a34b294aa1-installation-pull-secrets\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.191573 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.259222 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b0163f9b-75db-4986-965d-28a34b294aa1-registry-tls\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.259289 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b0163f9b-75db-4986-965d-28a34b294aa1-registry-certificates\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.259333 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b0163f9b-75db-4986-965d-28a34b294aa1-bound-sa-token\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.259373 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b0163f9b-75db-4986-965d-28a34b294aa1-ca-trust-extracted\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.259394 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c69j7\" (UniqueName: \"kubernetes.io/projected/b0163f9b-75db-4986-965d-28a34b294aa1-kube-api-access-c69j7\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.259411 4822 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b0163f9b-75db-4986-965d-28a34b294aa1-installation-pull-secrets\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.259442 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b0163f9b-75db-4986-965d-28a34b294aa1-trusted-ca\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.260889 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b0163f9b-75db-4986-965d-28a34b294aa1-ca-trust-extracted\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.261410 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b0163f9b-75db-4986-965d-28a34b294aa1-trusted-ca\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.261530 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b0163f9b-75db-4986-965d-28a34b294aa1-registry-certificates\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.270969 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b0163f9b-75db-4986-965d-28a34b294aa1-registry-tls\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.273975 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b0163f9b-75db-4986-965d-28a34b294aa1-installation-pull-secrets\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.285296 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c69j7\" (UniqueName: \"kubernetes.io/projected/b0163f9b-75db-4986-965d-28a34b294aa1-kube-api-access-c69j7\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") " pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.286058 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b0163f9b-75db-4986-965d-28a34b294aa1-bound-sa-token\") pod \"image-registry-66df7c8f76-b66rx\" (UID: \"b0163f9b-75db-4986-965d-28a34b294aa1\") 
" pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.445396 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:34 crc kubenswrapper[4822]: I1201 06:56:34.968374 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-b66rx"] Dec 01 06:56:34 crc kubenswrapper[4822]: W1201 06:56:34.977199 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0163f9b_75db_4986_965d_28a34b294aa1.slice/crio-d3c0172ed06af77ecddccbc0a02bc9aaed3546d2d9aea9e566d0e76b9e2d0120 WatchSource:0}: Error finding container d3c0172ed06af77ecddccbc0a02bc9aaed3546d2d9aea9e566d0e76b9e2d0120: Status 404 returned error can't find the container with id d3c0172ed06af77ecddccbc0a02bc9aaed3546d2d9aea9e566d0e76b9e2d0120 Dec 01 06:56:35 crc kubenswrapper[4822]: I1201 06:56:35.830828 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" event={"ID":"b0163f9b-75db-4986-965d-28a34b294aa1","Type":"ContainerStarted","Data":"b717e386c43cc3039c55935525148e77f1e6fab95b930534090de1401444fe19"} Dec 01 06:56:35 crc kubenswrapper[4822]: I1201 06:56:35.830907 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" event={"ID":"b0163f9b-75db-4986-965d-28a34b294aa1","Type":"ContainerStarted","Data":"d3c0172ed06af77ecddccbc0a02bc9aaed3546d2d9aea9e566d0e76b9e2d0120"} Dec 01 06:56:35 crc kubenswrapper[4822]: I1201 06:56:35.832274 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:35 crc kubenswrapper[4822]: I1201 06:56:35.851387 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" podStartSLOduration=1.85137043 podStartE2EDuration="1.85137043s" podCreationTimestamp="2025-12-01 06:56:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:56:35.850105162 +0000 UTC m=+351.170912858" watchObservedRunningTime="2025-12-01 06:56:35.85137043 +0000 UTC m=+351.172178116" Dec 01 06:56:39 crc kubenswrapper[4822]: E1201 06:56:39.103614 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218e0dbf_c5f6_405f_a22f_31b6f0f25d60.slice/crio-e99cf940b27126385d82ca118adcda6d6d2d8c00b3e91f049142f9358b3bd0bb\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218e0dbf_c5f6_405f_a22f_31b6f0f25d60.slice\": RecentStats: unable to find data in memory cache]" Dec 01 06:56:42 crc kubenswrapper[4822]: I1201 06:56:42.542878 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:56:42 crc kubenswrapper[4822]: I1201 06:56:42.542986 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" 
podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 06:56:47 crc kubenswrapper[4822]: I1201 06:56:47.916925 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nwqwt"] Dec 01 06:56:47 crc kubenswrapper[4822]: I1201 06:56:47.919487 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:47 crc kubenswrapper[4822]: I1201 06:56:47.923158 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 01 06:56:47 crc kubenswrapper[4822]: I1201 06:56:47.930024 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nwqwt"] Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.010646 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh7s6\" (UniqueName: \"kubernetes.io/projected/0b600d64-aa25-4b7a-bce7-482503ba8f7d-kube-api-access-zh7s6\") pod \"certified-operators-nwqwt\" (UID: \"0b600d64-aa25-4b7a-bce7-482503ba8f7d\") " pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.010712 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b600d64-aa25-4b7a-bce7-482503ba8f7d-catalog-content\") pod \"certified-operators-nwqwt\" (UID: \"0b600d64-aa25-4b7a-bce7-482503ba8f7d\") " pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.010752 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b600d64-aa25-4b7a-bce7-482503ba8f7d-utilities\") pod \"certified-operators-nwqwt\" (UID: \"0b600d64-aa25-4b7a-bce7-482503ba8f7d\") " pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.103895 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5v6h4"] Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.105222 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.108923 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.112352 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh7s6\" (UniqueName: \"kubernetes.io/projected/0b600d64-aa25-4b7a-bce7-482503ba8f7d-kube-api-access-zh7s6\") pod \"certified-operators-nwqwt\" (UID: \"0b600d64-aa25-4b7a-bce7-482503ba8f7d\") " pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.112435 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b600d64-aa25-4b7a-bce7-482503ba8f7d-catalog-content\") pod \"certified-operators-nwqwt\" (UID: \"0b600d64-aa25-4b7a-bce7-482503ba8f7d\") " pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.112473 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2825e8f7-d99d-4f07-a6db-b1c976946b30-catalog-content\") pod \"redhat-operators-5v6h4\" (UID: \"2825e8f7-d99d-4f07-a6db-b1c976946b30\") " pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.112526 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b600d64-aa25-4b7a-bce7-482503ba8f7d-utilities\") pod \"certified-operators-nwqwt\" (UID: \"0b600d64-aa25-4b7a-bce7-482503ba8f7d\") " pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.112664 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwfcl\" (UniqueName: \"kubernetes.io/projected/2825e8f7-d99d-4f07-a6db-b1c976946b30-kube-api-access-wwfcl\") pod \"redhat-operators-5v6h4\" (UID: \"2825e8f7-d99d-4f07-a6db-b1c976946b30\") " pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.112768 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2825e8f7-d99d-4f07-a6db-b1c976946b30-utilities\") pod \"redhat-operators-5v6h4\" (UID: \"2825e8f7-d99d-4f07-a6db-b1c976946b30\") " pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.113088 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b600d64-aa25-4b7a-bce7-482503ba8f7d-utilities\") pod \"certified-operators-nwqwt\" (UID: \"0b600d64-aa25-4b7a-bce7-482503ba8f7d\") " pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.113368 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b600d64-aa25-4b7a-bce7-482503ba8f7d-catalog-content\") pod \"certified-operators-nwqwt\" (UID: \"0b600d64-aa25-4b7a-bce7-482503ba8f7d\") " pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.126465 4822 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-marketplace/redhat-operators-5v6h4"] Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.138399 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh7s6\" (UniqueName: \"kubernetes.io/projected/0b600d64-aa25-4b7a-bce7-482503ba8f7d-kube-api-access-zh7s6\") pod \"certified-operators-nwqwt\" (UID: \"0b600d64-aa25-4b7a-bce7-482503ba8f7d\") " pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.213913 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2825e8f7-d99d-4f07-a6db-b1c976946b30-catalog-content\") pod \"redhat-operators-5v6h4\" (UID: \"2825e8f7-d99d-4f07-a6db-b1c976946b30\") " pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.213991 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwfcl\" (UniqueName: \"kubernetes.io/projected/2825e8f7-d99d-4f07-a6db-b1c976946b30-kube-api-access-wwfcl\") pod \"redhat-operators-5v6h4\" (UID: \"2825e8f7-d99d-4f07-a6db-b1c976946b30\") " pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.214214 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2825e8f7-d99d-4f07-a6db-b1c976946b30-utilities\") pod \"redhat-operators-5v6h4\" (UID: \"2825e8f7-d99d-4f07-a6db-b1c976946b30\") " pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.214616 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2825e8f7-d99d-4f07-a6db-b1c976946b30-catalog-content\") pod \"redhat-operators-5v6h4\" (UID: \"2825e8f7-d99d-4f07-a6db-b1c976946b30\") " pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.214703 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2825e8f7-d99d-4f07-a6db-b1c976946b30-utilities\") pod \"redhat-operators-5v6h4\" (UID: \"2825e8f7-d99d-4f07-a6db-b1c976946b30\") " pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.240711 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwfcl\" (UniqueName: \"kubernetes.io/projected/2825e8f7-d99d-4f07-a6db-b1c976946b30-kube-api-access-wwfcl\") pod \"redhat-operators-5v6h4\" (UID: \"2825e8f7-d99d-4f07-a6db-b1c976946b30\") " pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.247734 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.423272 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.684117 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nwqwt"] Dec 01 06:56:48 crc kubenswrapper[4822]: W1201 06:56:48.691150 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b600d64_aa25_4b7a_bce7_482503ba8f7d.slice/crio-b25df4fe5bacfcc8020d03a210b301aae3651df5def0c5322c484878b2264d89 WatchSource:0}: Error finding container b25df4fe5bacfcc8020d03a210b301aae3651df5def0c5322c484878b2264d89: Status 404 returned error can't find the container with id b25df4fe5bacfcc8020d03a210b301aae3651df5def0c5322c484878b2264d89 Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.903203 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5v6h4"] Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.924995 4822 generic.go:334] "Generic (PLEG): container finished" podID="0b600d64-aa25-4b7a-bce7-482503ba8f7d" containerID="aa36f9f5d29c0da3660a0c4620f9c01fb174d03ffc08093e1047043fa875e45f" exitCode=0 Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.925116 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwqwt" event={"ID":"0b600d64-aa25-4b7a-bce7-482503ba8f7d","Type":"ContainerDied","Data":"aa36f9f5d29c0da3660a0c4620f9c01fb174d03ffc08093e1047043fa875e45f"} Dec 01 06:56:48 crc kubenswrapper[4822]: I1201 06:56:48.926435 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwqwt" event={"ID":"0b600d64-aa25-4b7a-bce7-482503ba8f7d","Type":"ContainerStarted","Data":"b25df4fe5bacfcc8020d03a210b301aae3651df5def0c5322c484878b2264d89"} Dec 01 06:56:48 crc kubenswrapper[4822]: W1201 06:56:48.990123 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2825e8f7_d99d_4f07_a6db_b1c976946b30.slice/crio-10bdde11631d086769c2e679886e0c08477be980f4ef271fc1d3106fd70861d3 WatchSource:0}: Error finding container 10bdde11631d086769c2e679886e0c08477be980f4ef271fc1d3106fd70861d3: Status 404 returned error can't find the container with id 10bdde11631d086769c2e679886e0c08477be980f4ef271fc1d3106fd70861d3 Dec 01 06:56:49 crc kubenswrapper[4822]: E1201 06:56:49.231604 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2825e8f7_d99d_4f07_a6db_b1c976946b30.slice/crio-bc38e09317cfef47ef1f36ec37c02a6146cfd5a88f3448f19439e41e4d101453.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2825e8f7_d99d_4f07_a6db_b1c976946b30.slice/crio-conmon-bc38e09317cfef47ef1f36ec37c02a6146cfd5a88f3448f19439e41e4d101453.scope\": RecentStats: unable to find data in memory cache]" Dec 01 06:56:49 crc kubenswrapper[4822]: I1201 06:56:49.936650 4822 generic.go:334] "Generic (PLEG): container finished" podID="2825e8f7-d99d-4f07-a6db-b1c976946b30" containerID="bc38e09317cfef47ef1f36ec37c02a6146cfd5a88f3448f19439e41e4d101453" exitCode=0 Dec 01 06:56:49 crc kubenswrapper[4822]: I1201 06:56:49.936744 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5v6h4" 
event={"ID":"2825e8f7-d99d-4f07-a6db-b1c976946b30","Type":"ContainerDied","Data":"bc38e09317cfef47ef1f36ec37c02a6146cfd5a88f3448f19439e41e4d101453"} Dec 01 06:56:49 crc kubenswrapper[4822]: I1201 06:56:49.937161 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5v6h4" event={"ID":"2825e8f7-d99d-4f07-a6db-b1c976946b30","Type":"ContainerStarted","Data":"10bdde11631d086769c2e679886e0c08477be980f4ef271fc1d3106fd70861d3"} Dec 01 06:56:49 crc kubenswrapper[4822]: I1201 06:56:49.945773 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwqwt" event={"ID":"0b600d64-aa25-4b7a-bce7-482503ba8f7d","Type":"ContainerStarted","Data":"1e7b75e441fdaaf4d90ba2992a2683dbd6af54e1f775976b053ddcaae9ccdedc"} Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.307705 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sbxr5"] Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.309382 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.312441 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.336169 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sbxr5"] Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.345508 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4ppw\" (UniqueName: \"kubernetes.io/projected/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-kube-api-access-w4ppw\") pod \"community-operators-sbxr5\" (UID: \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\") " pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.345637 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-catalog-content\") pod \"community-operators-sbxr5\" (UID: \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\") " pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.345764 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-utilities\") pod \"community-operators-sbxr5\" (UID: \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\") " pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.446694 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-catalog-content\") pod \"community-operators-sbxr5\" (UID: \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\") " pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.446885 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-utilities\") pod \"community-operators-sbxr5\" (UID: \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\") " pod="openshift-marketplace/community-operators-sbxr5" 
Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.446977 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4ppw\" (UniqueName: \"kubernetes.io/projected/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-kube-api-access-w4ppw\") pod \"community-operators-sbxr5\" (UID: \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\") " pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.447618 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-catalog-content\") pod \"community-operators-sbxr5\" (UID: \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\") " pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.447779 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-utilities\") pod \"community-operators-sbxr5\" (UID: \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\") " pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.478028 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4ppw\" (UniqueName: \"kubernetes.io/projected/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-kube-api-access-w4ppw\") pod \"community-operators-sbxr5\" (UID: \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\") " pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.514061 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8fscd"] Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.515669 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.519761 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.525859 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8fscd"] Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.547908 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c77349ec-bedd-4aba-a58f-b328a5f4b877-utilities\") pod \"redhat-marketplace-8fscd\" (UID: \"c77349ec-bedd-4aba-a58f-b328a5f4b877\") " pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.547968 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c77349ec-bedd-4aba-a58f-b328a5f4b877-catalog-content\") pod \"redhat-marketplace-8fscd\" (UID: \"c77349ec-bedd-4aba-a58f-b328a5f4b877\") " pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.548177 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wnqk\" (UniqueName: \"kubernetes.io/projected/c77349ec-bedd-4aba-a58f-b328a5f4b877-kube-api-access-6wnqk\") pod \"redhat-marketplace-8fscd\" (UID: \"c77349ec-bedd-4aba-a58f-b328a5f4b877\") " pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.637598 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.649877 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wnqk\" (UniqueName: \"kubernetes.io/projected/c77349ec-bedd-4aba-a58f-b328a5f4b877-kube-api-access-6wnqk\") pod \"redhat-marketplace-8fscd\" (UID: \"c77349ec-bedd-4aba-a58f-b328a5f4b877\") " pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.649998 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c77349ec-bedd-4aba-a58f-b328a5f4b877-utilities\") pod \"redhat-marketplace-8fscd\" (UID: \"c77349ec-bedd-4aba-a58f-b328a5f4b877\") " pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.650040 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c77349ec-bedd-4aba-a58f-b328a5f4b877-catalog-content\") pod \"redhat-marketplace-8fscd\" (UID: \"c77349ec-bedd-4aba-a58f-b328a5f4b877\") " pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.650730 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c77349ec-bedd-4aba-a58f-b328a5f4b877-catalog-content\") pod \"redhat-marketplace-8fscd\" (UID: \"c77349ec-bedd-4aba-a58f-b328a5f4b877\") " pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.651364 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c77349ec-bedd-4aba-a58f-b328a5f4b877-utilities\") pod \"redhat-marketplace-8fscd\" (UID: \"c77349ec-bedd-4aba-a58f-b328a5f4b877\") " pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.684436 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wnqk\" (UniqueName: \"kubernetes.io/projected/c77349ec-bedd-4aba-a58f-b328a5f4b877-kube-api-access-6wnqk\") pod \"redhat-marketplace-8fscd\" (UID: \"c77349ec-bedd-4aba-a58f-b328a5f4b877\") " pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.837799 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.955417 4822 generic.go:334] "Generic (PLEG): container finished" podID="0b600d64-aa25-4b7a-bce7-482503ba8f7d" containerID="1e7b75e441fdaaf4d90ba2992a2683dbd6af54e1f775976b053ddcaae9ccdedc" exitCode=0 Dec 01 06:56:50 crc kubenswrapper[4822]: I1201 06:56:50.966015 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwqwt" event={"ID":"0b600d64-aa25-4b7a-bce7-482503ba8f7d","Type":"ContainerDied","Data":"1e7b75e441fdaaf4d90ba2992a2683dbd6af54e1f775976b053ddcaae9ccdedc"} Dec 01 06:56:51 crc kubenswrapper[4822]: I1201 06:56:51.114074 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sbxr5"] Dec 01 06:56:51 crc kubenswrapper[4822]: I1201 06:56:51.320652 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8fscd"] Dec 01 06:56:51 crc kubenswrapper[4822]: W1201 06:56:51.343174 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc77349ec_bedd_4aba_a58f_b328a5f4b877.slice/crio-cada089c01f4680d5985ac63e1d3c752900517dc4e09bdc820177204da3d3eed WatchSource:0}: Error finding container cada089c01f4680d5985ac63e1d3c752900517dc4e09bdc820177204da3d3eed: Status 404 returned error can't find the container with id cada089c01f4680d5985ac63e1d3c752900517dc4e09bdc820177204da3d3eed Dec 01 06:56:51 crc kubenswrapper[4822]: I1201 06:56:51.972966 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5v6h4" event={"ID":"2825e8f7-d99d-4f07-a6db-b1c976946b30","Type":"ContainerStarted","Data":"174f2932711b3e153ec79c77df72f1b2bcc6c46a6ad961a0008e25e46fba144d"} Dec 01 06:56:51 crc kubenswrapper[4822]: I1201 06:56:51.975353 4822 generic.go:334] "Generic (PLEG): container finished" podID="c77349ec-bedd-4aba-a58f-b328a5f4b877" containerID="592fe6dff193855664ec3cb081478d6388709754868f5a5291fee9f5d17df27a" exitCode=0 Dec 01 06:56:51 crc kubenswrapper[4822]: I1201 06:56:51.975437 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8fscd" event={"ID":"c77349ec-bedd-4aba-a58f-b328a5f4b877","Type":"ContainerDied","Data":"592fe6dff193855664ec3cb081478d6388709754868f5a5291fee9f5d17df27a"} Dec 01 06:56:51 crc kubenswrapper[4822]: I1201 06:56:51.975477 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8fscd" event={"ID":"c77349ec-bedd-4aba-a58f-b328a5f4b877","Type":"ContainerStarted","Data":"cada089c01f4680d5985ac63e1d3c752900517dc4e09bdc820177204da3d3eed"} Dec 01 06:56:51 crc kubenswrapper[4822]: I1201 06:56:51.979814 4822 generic.go:334] "Generic (PLEG): container finished" podID="ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" containerID="5cb0fbcec35bd1b65b2944b379e64d27f92098ba597585aeb1febcbbffdfdbe7" exitCode=0 Dec 01 06:56:51 crc kubenswrapper[4822]: I1201 06:56:51.979901 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbxr5" event={"ID":"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d","Type":"ContainerDied","Data":"5cb0fbcec35bd1b65b2944b379e64d27f92098ba597585aeb1febcbbffdfdbe7"} Dec 01 06:56:51 crc kubenswrapper[4822]: I1201 06:56:51.979957 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbxr5" 
event={"ID":"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d","Type":"ContainerStarted","Data":"568ce3e050c4f53b7f57731650975cc2b82893ec4f59e55b75c10cc67cdff560"} Dec 01 06:56:52 crc kubenswrapper[4822]: I1201 06:56:52.988531 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbxr5" event={"ID":"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d","Type":"ContainerStarted","Data":"cb663cff8e90f0dbd94124a055f45dad3ec86ce6bc97e615cd7ccce64da7ba38"} Dec 01 06:56:52 crc kubenswrapper[4822]: I1201 06:56:52.992722 4822 generic.go:334] "Generic (PLEG): container finished" podID="2825e8f7-d99d-4f07-a6db-b1c976946b30" containerID="174f2932711b3e153ec79c77df72f1b2bcc6c46a6ad961a0008e25e46fba144d" exitCode=0 Dec 01 06:56:52 crc kubenswrapper[4822]: I1201 06:56:52.993020 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5v6h4" event={"ID":"2825e8f7-d99d-4f07-a6db-b1c976946b30","Type":"ContainerDied","Data":"174f2932711b3e153ec79c77df72f1b2bcc6c46a6ad961a0008e25e46fba144d"} Dec 01 06:56:52 crc kubenswrapper[4822]: I1201 06:56:52.995021 4822 generic.go:334] "Generic (PLEG): container finished" podID="c77349ec-bedd-4aba-a58f-b328a5f4b877" containerID="842b3c44027116a97156c637cf1518a4af6e3ce163269277bca90c995059ba90" exitCode=0 Dec 01 06:56:52 crc kubenswrapper[4822]: I1201 06:56:52.995107 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8fscd" event={"ID":"c77349ec-bedd-4aba-a58f-b328a5f4b877","Type":"ContainerDied","Data":"842b3c44027116a97156c637cf1518a4af6e3ce163269277bca90c995059ba90"} Dec 01 06:56:53 crc kubenswrapper[4822]: I1201 06:56:53.002431 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwqwt" event={"ID":"0b600d64-aa25-4b7a-bce7-482503ba8f7d","Type":"ContainerStarted","Data":"8af67379870fcc1c54ebb37188cca6797897aa3eb8d83eb7e1dc61dccbefe70c"} Dec 01 06:56:53 crc kubenswrapper[4822]: I1201 06:56:53.100640 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nwqwt" podStartSLOduration=2.587359466 podStartE2EDuration="6.100617333s" podCreationTimestamp="2025-12-01 06:56:47 +0000 UTC" firstStartedPulling="2025-12-01 06:56:48.926881532 +0000 UTC m=+364.247689218" lastFinishedPulling="2025-12-01 06:56:52.440139359 +0000 UTC m=+367.760947085" observedRunningTime="2025-12-01 06:56:53.09680837 +0000 UTC m=+368.417616056" watchObservedRunningTime="2025-12-01 06:56:53.100617333 +0000 UTC m=+368.421425029" Dec 01 06:56:54 crc kubenswrapper[4822]: I1201 06:56:54.012417 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5v6h4" event={"ID":"2825e8f7-d99d-4f07-a6db-b1c976946b30","Type":"ContainerStarted","Data":"2cb0409bb748dd0009f7cb49d625530dbc479a1a2da498de31d2674ab3665de7"} Dec 01 06:56:54 crc kubenswrapper[4822]: I1201 06:56:54.015116 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8fscd" event={"ID":"c77349ec-bedd-4aba-a58f-b328a5f4b877","Type":"ContainerStarted","Data":"d470c05d8d6c81e69762f331ac3cb22d438edbad9f243eea8b958be870e610e3"} Dec 01 06:56:54 crc kubenswrapper[4822]: I1201 06:56:54.017396 4822 generic.go:334] "Generic (PLEG): container finished" podID="ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" containerID="cb663cff8e90f0dbd94124a055f45dad3ec86ce6bc97e615cd7ccce64da7ba38" exitCode=0 Dec 01 06:56:54 crc kubenswrapper[4822]: I1201 06:56:54.017510 4822 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbxr5" event={"ID":"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d","Type":"ContainerDied","Data":"cb663cff8e90f0dbd94124a055f45dad3ec86ce6bc97e615cd7ccce64da7ba38"} Dec 01 06:56:54 crc kubenswrapper[4822]: I1201 06:56:54.044689 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5v6h4" podStartSLOduration=2.509194781 podStartE2EDuration="6.044668387s" podCreationTimestamp="2025-12-01 06:56:48 +0000 UTC" firstStartedPulling="2025-12-01 06:56:49.939379255 +0000 UTC m=+365.260186951" lastFinishedPulling="2025-12-01 06:56:53.474852871 +0000 UTC m=+368.795660557" observedRunningTime="2025-12-01 06:56:54.041921936 +0000 UTC m=+369.362729632" watchObservedRunningTime="2025-12-01 06:56:54.044668387 +0000 UTC m=+369.365476083" Dec 01 06:56:54 crc kubenswrapper[4822]: I1201 06:56:54.092331 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8fscd" podStartSLOduration=2.617291213 podStartE2EDuration="4.0923134s" podCreationTimestamp="2025-12-01 06:56:50 +0000 UTC" firstStartedPulling="2025-12-01 06:56:51.978746358 +0000 UTC m=+367.299554054" lastFinishedPulling="2025-12-01 06:56:53.453768555 +0000 UTC m=+368.774576241" observedRunningTime="2025-12-01 06:56:54.09129583 +0000 UTC m=+369.412103516" watchObservedRunningTime="2025-12-01 06:56:54.0923134 +0000 UTC m=+369.413121076" Dec 01 06:56:54 crc kubenswrapper[4822]: I1201 06:56:54.455070 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-b66rx" Dec 01 06:56:54 crc kubenswrapper[4822]: I1201 06:56:54.526990 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-9f7zg"] Dec 01 06:56:55 crc kubenswrapper[4822]: I1201 06:56:55.037822 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbxr5" event={"ID":"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d","Type":"ContainerStarted","Data":"b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382"} Dec 01 06:56:55 crc kubenswrapper[4822]: I1201 06:56:55.071392 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sbxr5" podStartSLOduration=2.618094967 podStartE2EDuration="5.071368972s" podCreationTimestamp="2025-12-01 06:56:50 +0000 UTC" firstStartedPulling="2025-12-01 06:56:51.995301599 +0000 UTC m=+367.316109295" lastFinishedPulling="2025-12-01 06:56:54.448575594 +0000 UTC m=+369.769383300" observedRunningTime="2025-12-01 06:56:55.067035073 +0000 UTC m=+370.387842759" watchObservedRunningTime="2025-12-01 06:56:55.071368972 +0000 UTC m=+370.392176658" Dec 01 06:56:58 crc kubenswrapper[4822]: I1201 06:56:58.248588 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:58 crc kubenswrapper[4822]: I1201 06:56:58.248967 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:58 crc kubenswrapper[4822]: I1201 06:56:58.308298 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:58 crc kubenswrapper[4822]: I1201 06:56:58.426380 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:58 crc kubenswrapper[4822]: I1201 06:56:58.426625 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:56:59 crc kubenswrapper[4822]: I1201 06:56:59.129247 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nwqwt" Dec 01 06:56:59 crc kubenswrapper[4822]: I1201 06:56:59.497603 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5v6h4" podUID="2825e8f7-d99d-4f07-a6db-b1c976946b30" containerName="registry-server" probeResult="failure" output=< Dec 01 06:56:59 crc kubenswrapper[4822]: timeout: failed to connect service ":50051" within 1s Dec 01 06:56:59 crc kubenswrapper[4822]: > Dec 01 06:57:00 crc kubenswrapper[4822]: I1201 06:57:00.639820 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:57:00 crc kubenswrapper[4822]: I1201 06:57:00.640342 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:57:00 crc kubenswrapper[4822]: I1201 06:57:00.690255 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:57:00 crc kubenswrapper[4822]: I1201 06:57:00.838930 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:57:00 crc kubenswrapper[4822]: I1201 06:57:00.839026 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:57:00 crc kubenswrapper[4822]: I1201 06:57:00.905136 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:57:01 crc kubenswrapper[4822]: I1201 06:57:01.149715 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8fscd" Dec 01 06:57:01 crc kubenswrapper[4822]: I1201 06:57:01.166513 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sbxr5" Dec 01 06:57:08 crc kubenswrapper[4822]: I1201 06:57:08.498267 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:57:08 crc kubenswrapper[4822]: I1201 06:57:08.561299 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5v6h4" Dec 01 06:57:12 crc kubenswrapper[4822]: I1201 06:57:12.543293 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:57:12 crc kubenswrapper[4822]: I1201 06:57:12.543638 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 06:57:19 crc kubenswrapper[4822]: I1201 06:57:19.588484 4822 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" podUID="ba3ade7b-ad1c-4f60-9cde-f7a198336912" containerName="registry" containerID="cri-o://666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019" gracePeriod=30 Dec 01 06:57:19 crc kubenswrapper[4822]: E1201 06:57:19.657524 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba3ade7b_ad1c_4f60_9cde_f7a198336912.slice/crio-666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019.scope\": RecentStats: unable to find data in memory cache]" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.035186 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.176924 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ba3ade7b-ad1c-4f60-9cde-f7a198336912-registry-certificates\") pod \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.177043 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ba3ade7b-ad1c-4f60-9cde-f7a198336912-installation-pull-secrets\") pod \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.177098 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ba3ade7b-ad1c-4f60-9cde-f7a198336912-trusted-ca\") pod \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.177256 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.177282 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrn7l\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-kube-api-access-hrn7l\") pod \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.177321 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ba3ade7b-ad1c-4f60-9cde-f7a198336912-ca-trust-extracted\") pod \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.177354 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-registry-tls\") pod \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.177389 4822 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-bound-sa-token\") pod \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\" (UID: \"ba3ade7b-ad1c-4f60-9cde-f7a198336912\") " Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.178145 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba3ade7b-ad1c-4f60-9cde-f7a198336912-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "ba3ade7b-ad1c-4f60-9cde-f7a198336912" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.178212 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba3ade7b-ad1c-4f60-9cde-f7a198336912-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "ba3ade7b-ad1c-4f60-9cde-f7a198336912" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.190080 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "ba3ade7b-ad1c-4f60-9cde-f7a198336912" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.190638 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-kube-api-access-hrn7l" (OuterVolumeSpecName: "kube-api-access-hrn7l") pod "ba3ade7b-ad1c-4f60-9cde-f7a198336912" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912"). InnerVolumeSpecName "kube-api-access-hrn7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.193470 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "ba3ade7b-ad1c-4f60-9cde-f7a198336912" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.194364 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba3ade7b-ad1c-4f60-9cde-f7a198336912-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "ba3ade7b-ad1c-4f60-9cde-f7a198336912" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.194866 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "ba3ade7b-ad1c-4f60-9cde-f7a198336912" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.201272 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba3ade7b-ad1c-4f60-9cde-f7a198336912-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "ba3ade7b-ad1c-4f60-9cde-f7a198336912" (UID: "ba3ade7b-ad1c-4f60-9cde-f7a198336912"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.216162 4822 generic.go:334] "Generic (PLEG): container finished" podID="ba3ade7b-ad1c-4f60-9cde-f7a198336912" containerID="666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019" exitCode=0 Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.216222 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.216258 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" event={"ID":"ba3ade7b-ad1c-4f60-9cde-f7a198336912","Type":"ContainerDied","Data":"666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019"} Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.216849 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-9f7zg" event={"ID":"ba3ade7b-ad1c-4f60-9cde-f7a198336912","Type":"ContainerDied","Data":"ec37137fd117fc16fe56e25baf244ff6b523f75bd244885657fcb647df0827b1"} Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.216872 4822 scope.go:117] "RemoveContainer" containerID="666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.241672 4822 scope.go:117] "RemoveContainer" containerID="666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019" Dec 01 06:57:20 crc kubenswrapper[4822]: E1201 06:57:20.246845 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019\": container with ID starting with 666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019 not found: ID does not exist" containerID="666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.246907 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019"} err="failed to get container status \"666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019\": rpc error: code = NotFound desc = could not find container \"666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019\": container with ID starting with 666c98791ce3560d1c03e22e6179ad3c7ed2cb71c4d704d087c2dc1b2bc28019 not found: ID does not exist" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.247120 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-9f7zg"] Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.258626 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-9f7zg"] Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.278823 4822 reconciler_common.go:293] "Volume detached for volume 
\"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ba3ade7b-ad1c-4f60-9cde-f7a198336912-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.278869 4822 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ba3ade7b-ad1c-4f60-9cde-f7a198336912-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.278885 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrn7l\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-kube-api-access-hrn7l\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.278894 4822 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ba3ade7b-ad1c-4f60-9cde-f7a198336912-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.278906 4822 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.278915 4822 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba3ade7b-ad1c-4f60-9cde-f7a198336912-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.278926 4822 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ba3ade7b-ad1c-4f60-9cde-f7a198336912-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:20 crc kubenswrapper[4822]: I1201 06:57:20.960719 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba3ade7b-ad1c-4f60-9cde-f7a198336912" path="/var/lib/kubelet/pods/ba3ade7b-ad1c-4f60-9cde-f7a198336912/volumes" Dec 01 06:57:42 crc kubenswrapper[4822]: I1201 06:57:42.543413 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:57:42 crc kubenswrapper[4822]: I1201 06:57:42.544151 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 06:57:42 crc kubenswrapper[4822]: I1201 06:57:42.544220 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 06:57:42 crc kubenswrapper[4822]: I1201 06:57:42.545021 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"722f940bf0dc20661a5ffefc04398ee7a3b154f90f89e6da1487f7220d930232"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 06:57:42 crc kubenswrapper[4822]: I1201 06:57:42.545120 4822 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://722f940bf0dc20661a5ffefc04398ee7a3b154f90f89e6da1487f7220d930232" gracePeriod=600 Dec 01 06:57:43 crc kubenswrapper[4822]: I1201 06:57:43.399413 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="722f940bf0dc20661a5ffefc04398ee7a3b154f90f89e6da1487f7220d930232" exitCode=0 Dec 01 06:57:43 crc kubenswrapper[4822]: I1201 06:57:43.399465 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"722f940bf0dc20661a5ffefc04398ee7a3b154f90f89e6da1487f7220d930232"} Dec 01 06:57:43 crc kubenswrapper[4822]: I1201 06:57:43.400113 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"3bc601827ebd06a71930384d1ac078a53dc216ae11dbb095771eb71fc08f11e8"} Dec 01 06:57:43 crc kubenswrapper[4822]: I1201 06:57:43.400156 4822 scope.go:117] "RemoveContainer" containerID="102240432a3443d2c862083bdac7ae90c19792143bbf463bdc8047284ce237f3" Dec 01 06:59:42 crc kubenswrapper[4822]: I1201 06:59:42.543245 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:59:42 crc kubenswrapper[4822]: I1201 06:59:42.544108 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.209780 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b"] Dec 01 07:00:00 crc kubenswrapper[4822]: E1201 07:00:00.210540 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba3ade7b-ad1c-4f60-9cde-f7a198336912" containerName="registry" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.210590 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba3ade7b-ad1c-4f60-9cde-f7a198336912" containerName="registry" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.210691 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba3ade7b-ad1c-4f60-9cde-f7a198336912" containerName="registry" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.211105 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.214246 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.214354 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.227214 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b"] Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.272351 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xx9z\" (UniqueName: \"kubernetes.io/projected/8170a546-833c-4ef5-8111-ffabc9faf33a-kube-api-access-8xx9z\") pod \"collect-profiles-29409540-xsr8b\" (UID: \"8170a546-833c-4ef5-8111-ffabc9faf33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.272423 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8170a546-833c-4ef5-8111-ffabc9faf33a-config-volume\") pod \"collect-profiles-29409540-xsr8b\" (UID: \"8170a546-833c-4ef5-8111-ffabc9faf33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.272511 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8170a546-833c-4ef5-8111-ffabc9faf33a-secret-volume\") pod \"collect-profiles-29409540-xsr8b\" (UID: \"8170a546-833c-4ef5-8111-ffabc9faf33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.373639 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xx9z\" (UniqueName: \"kubernetes.io/projected/8170a546-833c-4ef5-8111-ffabc9faf33a-kube-api-access-8xx9z\") pod \"collect-profiles-29409540-xsr8b\" (UID: \"8170a546-833c-4ef5-8111-ffabc9faf33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.373733 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8170a546-833c-4ef5-8111-ffabc9faf33a-config-volume\") pod \"collect-profiles-29409540-xsr8b\" (UID: \"8170a546-833c-4ef5-8111-ffabc9faf33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.373788 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8170a546-833c-4ef5-8111-ffabc9faf33a-secret-volume\") pod \"collect-profiles-29409540-xsr8b\" (UID: \"8170a546-833c-4ef5-8111-ffabc9faf33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.378815 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8170a546-833c-4ef5-8111-ffabc9faf33a-config-volume\") pod 
\"collect-profiles-29409540-xsr8b\" (UID: \"8170a546-833c-4ef5-8111-ffabc9faf33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.387742 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8170a546-833c-4ef5-8111-ffabc9faf33a-secret-volume\") pod \"collect-profiles-29409540-xsr8b\" (UID: \"8170a546-833c-4ef5-8111-ffabc9faf33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.406033 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xx9z\" (UniqueName: \"kubernetes.io/projected/8170a546-833c-4ef5-8111-ffabc9faf33a-kube-api-access-8xx9z\") pod \"collect-profiles-29409540-xsr8b\" (UID: \"8170a546-833c-4ef5-8111-ffabc9faf33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.536127 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:00 crc kubenswrapper[4822]: I1201 07:00:00.994821 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b"] Dec 01 07:00:01 crc kubenswrapper[4822]: I1201 07:00:01.444147 4822 generic.go:334] "Generic (PLEG): container finished" podID="8170a546-833c-4ef5-8111-ffabc9faf33a" containerID="bda96f79472d7d55f3bea2e6548d70a4afc1ebaa3a1ddd69a92cd9218e9d3a29" exitCode=0 Dec 01 07:00:01 crc kubenswrapper[4822]: I1201 07:00:01.444438 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" event={"ID":"8170a546-833c-4ef5-8111-ffabc9faf33a","Type":"ContainerDied","Data":"bda96f79472d7d55f3bea2e6548d70a4afc1ebaa3a1ddd69a92cd9218e9d3a29"} Dec 01 07:00:01 crc kubenswrapper[4822]: I1201 07:00:01.444753 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" event={"ID":"8170a546-833c-4ef5-8111-ffabc9faf33a","Type":"ContainerStarted","Data":"3bda43129ba08133ab3af334ed6b74a44e327f1b31fe15e5b54db0c248539a13"} Dec 01 07:00:02 crc kubenswrapper[4822]: I1201 07:00:02.716709 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:02 crc kubenswrapper[4822]: I1201 07:00:02.810489 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8170a546-833c-4ef5-8111-ffabc9faf33a-config-volume\") pod \"8170a546-833c-4ef5-8111-ffabc9faf33a\" (UID: \"8170a546-833c-4ef5-8111-ffabc9faf33a\") " Dec 01 07:00:02 crc kubenswrapper[4822]: I1201 07:00:02.810647 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xx9z\" (UniqueName: \"kubernetes.io/projected/8170a546-833c-4ef5-8111-ffabc9faf33a-kube-api-access-8xx9z\") pod \"8170a546-833c-4ef5-8111-ffabc9faf33a\" (UID: \"8170a546-833c-4ef5-8111-ffabc9faf33a\") " Dec 01 07:00:02 crc kubenswrapper[4822]: I1201 07:00:02.810893 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8170a546-833c-4ef5-8111-ffabc9faf33a-secret-volume\") pod \"8170a546-833c-4ef5-8111-ffabc9faf33a\" (UID: \"8170a546-833c-4ef5-8111-ffabc9faf33a\") " Dec 01 07:00:02 crc kubenswrapper[4822]: I1201 07:00:02.812288 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8170a546-833c-4ef5-8111-ffabc9faf33a-config-volume" (OuterVolumeSpecName: "config-volume") pod "8170a546-833c-4ef5-8111-ffabc9faf33a" (UID: "8170a546-833c-4ef5-8111-ffabc9faf33a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:00:02 crc kubenswrapper[4822]: I1201 07:00:02.817270 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8170a546-833c-4ef5-8111-ffabc9faf33a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8170a546-833c-4ef5-8111-ffabc9faf33a" (UID: "8170a546-833c-4ef5-8111-ffabc9faf33a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:02 crc kubenswrapper[4822]: I1201 07:00:02.818432 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8170a546-833c-4ef5-8111-ffabc9faf33a-kube-api-access-8xx9z" (OuterVolumeSpecName: "kube-api-access-8xx9z") pod "8170a546-833c-4ef5-8111-ffabc9faf33a" (UID: "8170a546-833c-4ef5-8111-ffabc9faf33a"). InnerVolumeSpecName "kube-api-access-8xx9z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:00:02 crc kubenswrapper[4822]: I1201 07:00:02.914386 4822 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8170a546-833c-4ef5-8111-ffabc9faf33a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:02 crc kubenswrapper[4822]: I1201 07:00:02.914460 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xx9z\" (UniqueName: \"kubernetes.io/projected/8170a546-833c-4ef5-8111-ffabc9faf33a-kube-api-access-8xx9z\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:02 crc kubenswrapper[4822]: I1201 07:00:02.914487 4822 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8170a546-833c-4ef5-8111-ffabc9faf33a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:03 crc kubenswrapper[4822]: I1201 07:00:03.461024 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" event={"ID":"8170a546-833c-4ef5-8111-ffabc9faf33a","Type":"ContainerDied","Data":"3bda43129ba08133ab3af334ed6b74a44e327f1b31fe15e5b54db0c248539a13"} Dec 01 07:00:03 crc kubenswrapper[4822]: I1201 07:00:03.461093 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b" Dec 01 07:00:03 crc kubenswrapper[4822]: I1201 07:00:03.461121 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bda43129ba08133ab3af334ed6b74a44e327f1b31fe15e5b54db0c248539a13" Dec 01 07:00:12 crc kubenswrapper[4822]: I1201 07:00:12.543170 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:00:12 crc kubenswrapper[4822]: I1201 07:00:12.543929 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:00:42 crc kubenswrapper[4822]: I1201 07:00:42.542955 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:00:42 crc kubenswrapper[4822]: I1201 07:00:42.543509 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:00:42 crc kubenswrapper[4822]: I1201 07:00:42.543571 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 07:00:42 crc kubenswrapper[4822]: I1201 07:00:42.543980 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"3bc601827ebd06a71930384d1ac078a53dc216ae11dbb095771eb71fc08f11e8"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:00:42 crc kubenswrapper[4822]: I1201 07:00:42.544026 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://3bc601827ebd06a71930384d1ac078a53dc216ae11dbb095771eb71fc08f11e8" gracePeriod=600 Dec 01 07:00:42 crc kubenswrapper[4822]: I1201 07:00:42.788039 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="3bc601827ebd06a71930384d1ac078a53dc216ae11dbb095771eb71fc08f11e8" exitCode=0 Dec 01 07:00:42 crc kubenswrapper[4822]: I1201 07:00:42.788123 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"3bc601827ebd06a71930384d1ac078a53dc216ae11dbb095771eb71fc08f11e8"} Dec 01 07:00:42 crc kubenswrapper[4822]: I1201 07:00:42.788472 4822 scope.go:117] "RemoveContainer" containerID="722f940bf0dc20661a5ffefc04398ee7a3b154f90f89e6da1487f7220d930232" Dec 01 07:00:43 crc kubenswrapper[4822]: I1201 07:00:43.812277 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"338b930b0a920daac3d47f96c28535256b673fb917325f97766ffd05e922ff93"} Dec 01 07:02:42 crc kubenswrapper[4822]: I1201 07:02:42.543243 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:02:42 crc kubenswrapper[4822]: I1201 07:02:42.543922 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.034730 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-75mdq"] Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.036121 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovn-controller" containerID="cri-o://60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef" gracePeriod=30 Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.036518 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="sbdb" containerID="cri-o://e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382" gracePeriod=30 Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.036600 4822 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="nbdb" containerID="cri-o://c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1" gracePeriod=30 Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.036647 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="northd" containerID="cri-o://2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281" gracePeriod=30 Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.036692 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca" gracePeriod=30 Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.036733 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="kube-rbac-proxy-node" containerID="cri-o://4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade" gracePeriod=30 Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.036772 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovn-acl-logging" containerID="cri-o://3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50" gracePeriod=30 Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.087181 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" containerID="cri-o://a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8" gracePeriod=30 Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.381917 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/3.log" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.383833 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovn-acl-logging/0.log" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.384226 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovn-controller/0.log" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.384514 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.433794 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-bgbks"] Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434118 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="kube-rbac-proxy-ovn-metrics" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434137 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="kube-rbac-proxy-ovn-metrics" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434148 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="kubecfg-setup" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434155 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="kubecfg-setup" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434168 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434175 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434189 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="northd" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434196 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="northd" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434204 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="sbdb" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434209 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="sbdb" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434218 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434224 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434231 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovn-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434238 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovn-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434250 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="kube-rbac-proxy-node" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434259 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="kube-rbac-proxy-node" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434268 4822 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8170a546-833c-4ef5-8111-ffabc9faf33a" containerName="collect-profiles" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434275 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8170a546-833c-4ef5-8111-ffabc9faf33a" containerName="collect-profiles" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434287 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovn-acl-logging" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434294 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovn-acl-logging" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434305 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="nbdb" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434312 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="nbdb" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434321 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434326 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434437 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434447 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8170a546-833c-4ef5-8111-ffabc9faf33a" containerName="collect-profiles" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434476 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434484 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434492 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="nbdb" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434498 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovn-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434505 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="kube-rbac-proxy-ovn-metrics" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434515 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="northd" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434525 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovn-acl-logging" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434535 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="sbdb" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434659 4822 
memory_manager.go:354] "RemoveStaleState removing state" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="kube-rbac-proxy-node" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.434765 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434773 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434873 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.434885 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: E1201 07:03:05.435006 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.435015 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8284d339-ff12-453a-be42-4540e44252ee" containerName="ovnkube-controller" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.437223 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.440232 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-cni-netd\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.440305 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-etc-openvswitch\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.440400 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d1375b56-b05b-484a-aa16-69f33b4c6c6c-ovn-node-metrics-cert\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.440600 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-kubelet\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.440666 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-cni-bin\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.441926 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d1375b56-b05b-484a-aa16-69f33b4c6c6c-env-overrides\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442001 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d1375b56-b05b-484a-aa16-69f33b4c6c6c-ovnkube-script-lib\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442034 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-run-openvswitch\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442058 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-node-log\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442184 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-run-systemd\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442217 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442277 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-log-socket\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442327 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n64kh\" (UniqueName: \"kubernetes.io/projected/d1375b56-b05b-484a-aa16-69f33b4c6c6c-kube-api-access-n64kh\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442355 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/d1375b56-b05b-484a-aa16-69f33b4c6c6c-ovnkube-config\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442425 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-var-lib-openvswitch\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442482 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-systemd-units\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442581 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-run-netns\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442606 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-run-ovn\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442672 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-run-ovn-kubernetes\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.442742 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-slash\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.543526 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-run-ovn-kubernetes\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.543763 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-openvswitch\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.543806 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-cni-bin\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.543826 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-run-netns\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.543892 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-var-lib-cni-networks-ovn-kubernetes\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.543923 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8284d339-ff12-453a-be42-4540e44252ee-ovn-node-metrics-cert\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.543964 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-systemd-units\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.543987 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-ovnkube-config\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544014 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-log-socket\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544051 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlfwt\" (UniqueName: \"kubernetes.io/projected/8284d339-ff12-453a-be42-4540e44252ee-kube-api-access-vlfwt\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.543678 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544108 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-node-log" (OuterVolumeSpecName: "node-log") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). 
InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544138 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544081 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-node-log\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544173 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544202 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544265 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-ovnkube-script-lib\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544361 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-kubelet\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544411 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-env-overrides\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544508 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-var-lib-openvswitch\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544585 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-cni-netd\") pod 
\"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544688 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-slash\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544744 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-etc-openvswitch\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544801 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-ovn\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544848 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-systemd\") pod \"8284d339-ff12-453a-be42-4540e44252ee\" (UID: \"8284d339-ff12-453a-be42-4540e44252ee\") " Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545224 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-var-lib-openvswitch\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545307 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-systemd-units\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545403 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-run-netns\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545471 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-run-ovn\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545582 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-run-ovn-kubernetes\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545666 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-slash\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545723 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-cni-netd\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545781 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-etc-openvswitch\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545873 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d1375b56-b05b-484a-aa16-69f33b4c6c6c-ovn-node-metrics-cert\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545981 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-kubelet\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546095 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-cni-bin\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546162 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d1375b56-b05b-484a-aa16-69f33b4c6c6c-env-overrides\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546214 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d1375b56-b05b-484a-aa16-69f33b4c6c6c-ovnkube-script-lib\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546277 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-run-openvswitch\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546376 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-node-log\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546403 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-var-lib-openvswitch\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545233 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.544024 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545301 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545328 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545348 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-slash" (OuterVolumeSpecName: "host-slash") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545659 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546493 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-run-systemd\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546513 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546528 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-cni-netd\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546585 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-systemd-units\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546569 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-log-socket\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546623 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-run-netns\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546651 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-run-ovn\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546664 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-etc-openvswitch\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545684 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.545957 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546274 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546307 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546324 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-log-socket" (OuterVolumeSpecName: "log-socket") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546339 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546741 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-run-systemd\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546767 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546878 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-node-log\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546968 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-kubelet\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547030 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-cni-bin\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546379 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-slash\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546534 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-log-socket\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547243 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d1375b56-b05b-484a-aa16-69f33b4c6c6c-ovnkube-config\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547295 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n64kh\" (UniqueName: \"kubernetes.io/projected/d1375b56-b05b-484a-aa16-69f33b4c6c6c-kube-api-access-n64kh\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547415 
4822 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547443 4822 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.546678 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-host-run-ovn-kubernetes\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547471 4822 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547592 4822 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547615 4822 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547650 4822 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547675 4822 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547701 4822 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547709 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1375b56-b05b-484a-aa16-69f33b4c6c6c-run-openvswitch\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547728 4822 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547831 4822 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-log-socket\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547850 4822 reconciler_common.go:293] "Volume detached for 
volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-node-log\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547869 4822 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547888 4822 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547914 4822 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8284d339-ff12-453a-be42-4540e44252ee-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.547933 4822 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.550730 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8284d339-ff12-453a-be42-4540e44252ee-kube-api-access-vlfwt" (OuterVolumeSpecName: "kube-api-access-vlfwt") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "kube-api-access-vlfwt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.552630 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d1375b56-b05b-484a-aa16-69f33b4c6c6c-ovnkube-config\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.552937 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8284d339-ff12-453a-be42-4540e44252ee-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.553140 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d1375b56-b05b-484a-aa16-69f33b4c6c6c-ovnkube-script-lib\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.553870 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d1375b56-b05b-484a-aa16-69f33b4c6c6c-env-overrides\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.556399 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d1375b56-b05b-484a-aa16-69f33b4c6c6c-ovn-node-metrics-cert\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.567864 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "8284d339-ff12-453a-be42-4540e44252ee" (UID: "8284d339-ff12-453a-be42-4540e44252ee"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.573203 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n64kh\" (UniqueName: \"kubernetes.io/projected/d1375b56-b05b-484a-aa16-69f33b4c6c6c-kube-api-access-n64kh\") pod \"ovnkube-node-bgbks\" (UID: \"d1375b56-b05b-484a-aa16-69f33b4c6c6c\") " pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.649167 4822 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.649212 4822 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-host-slash\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.649226 4822 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8284d339-ff12-453a-be42-4540e44252ee-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.649240 4822 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8284d339-ff12-453a-be42-4540e44252ee-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.649252 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlfwt\" (UniqueName: \"kubernetes.io/projected/8284d339-ff12-453a-be42-4540e44252ee-kube-api-access-vlfwt\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.752837 4822 util.go:30] "No sandbox for pod can be found. 
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.752837 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks"
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.896878 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-b6tpr_34b58185-4742-4187-9243-860433c413d8/kube-multus/2.log"
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.897585 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-b6tpr_34b58185-4742-4187-9243-860433c413d8/kube-multus/1.log"
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.897641 4822 generic.go:334] "Generic (PLEG): container finished" podID="34b58185-4742-4187-9243-860433c413d8" containerID="1c1cebe76b95f0676ed78339399a49cb488c2817e6e376c7a0fbc8b707ce3c8a" exitCode=2
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.897822 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-b6tpr" event={"ID":"34b58185-4742-4187-9243-860433c413d8","Type":"ContainerDied","Data":"1c1cebe76b95f0676ed78339399a49cb488c2817e6e376c7a0fbc8b707ce3c8a"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.897881 4822 scope.go:117] "RemoveContainer" containerID="286b5eec5380bce867f5f628bb82ce645ccb06c9b5efd892b1530a323d2f97e6"
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.898602 4822 scope.go:117] "RemoveContainer" containerID="1c1cebe76b95f0676ed78339399a49cb488c2817e6e376c7a0fbc8b707ce3c8a"
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.902689 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" event={"ID":"d1375b56-b05b-484a-aa16-69f33b4c6c6c","Type":"ContainerStarted","Data":"45b64a4c4294069f4d890bc6465e101c1a767d146abb6f3c7bb2df635986198a"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.911142 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovnkube-controller/3.log"
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.915306 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovn-acl-logging/0.log"
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.916114 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-75mdq_8284d339-ff12-453a-be42-4540e44252ee/ovn-controller/0.log"
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.918796 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8" exitCode=0
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.918843 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382" exitCode=0
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.918859 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1" exitCode=0
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.918873 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281" exitCode=0
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.918888 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca" exitCode=0
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.918900 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade" exitCode=0
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.918913 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50" exitCode=143
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.918927 4822 generic.go:334] "Generic (PLEG): container finished" podID="8284d339-ff12-453a-be42-4540e44252ee" containerID="60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef" exitCode=143
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.918959 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.918994 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919019 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919040 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919061 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919080 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919099 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919115 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919127 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919138 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919150 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919161 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919171 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919183 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919194 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919208 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919223 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919239 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919251 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919262 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919273 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919284 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919295 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919306 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919317 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919328 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919339 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919355 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919371 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919383 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919394 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919405 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919415 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919426 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919437 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919447 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919457 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919468 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919482 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq" event={"ID":"8284d339-ff12-453a-be42-4540e44252ee","Type":"ContainerDied","Data":"0d88d1549bb52a904a435414e0330cdd92fde7214b0f3ef3b0470295cabbfbf6"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919497 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919509 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919520 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919532 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919542 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919585 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919596 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919606 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919616 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919627 4822 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"}
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.919767 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-75mdq"
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.943987 4822 scope.go:117] "RemoveContainer" containerID="a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"
Dec 01 07:03:05 crc kubenswrapper[4822]: I1201 07:03:05.981413 4822 scope.go:117] "RemoveContainer" containerID="3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.011696 4822 scope.go:117] "RemoveContainer" containerID="e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.019985 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-75mdq"]
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.023471 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-75mdq"]
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.032326 4822 scope.go:117] "RemoveContainer" containerID="c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.045339 4822 scope.go:117] "RemoveContainer" containerID="2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.062847 4822 scope.go:117] "RemoveContainer" containerID="50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.079686 4822 scope.go:117] "RemoveContainer" containerID="4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.092368 4822 scope.go:117] "RemoveContainer" containerID="3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.103863 4822 scope.go:117] "RemoveContainer" containerID="60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.117422 4822 scope.go:117] "RemoveContainer" containerID="6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.131951 4822 scope.go:117] "RemoveContainer" containerID="a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"
Dec 01 07:03:06 crc kubenswrapper[4822]: E1201 07:03:06.132274 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8\": container with ID starting with a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8 not found: ID does not exist" containerID="a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.132321 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"} err="failed to get container status \"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8\": rpc error: code = NotFound desc = could not find container \"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8\": container with ID starting with a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.132353 4822 scope.go:117] "RemoveContainer" containerID="3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"
Dec 01 07:03:06 crc kubenswrapper[4822]: E1201 07:03:06.132781 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\": container with ID starting with 3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143 not found: ID does not exist" containerID="3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.132815 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"} err="failed to get container status \"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\": rpc error: code = NotFound desc = could not find container \"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\": container with ID starting with 3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.132838 4822 scope.go:117] "RemoveContainer" containerID="e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"
Dec 01 07:03:06 crc kubenswrapper[4822]: E1201 07:03:06.133067 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\": container with ID starting with e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382 not found: ID does not exist" containerID="e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.133105 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"} err="failed to get container status \"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\": rpc error: code = NotFound desc = could not find container \"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\": container with ID starting with e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.133135 4822 scope.go:117] "RemoveContainer" containerID="c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"
Dec 01 07:03:06 crc kubenswrapper[4822]: E1201 07:03:06.133638 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\": container with ID starting with c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1 not found: ID does not exist" containerID="c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.133680 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"} err="failed to get container status \"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\": rpc error: code = NotFound desc = could not find container \"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\": container with ID starting with c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.133705 4822 scope.go:117] "RemoveContainer" containerID="2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"
Dec 01 07:03:06 crc kubenswrapper[4822]: E1201 07:03:06.134235 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\": container with ID starting with 2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281 not found: ID does not exist" containerID="2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.134276 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"} err="failed to get container status \"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\": rpc error: code = NotFound desc = could not find container \"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\": container with ID starting with 2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.134303 4822 scope.go:117] "RemoveContainer" containerID="50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"
Dec 01 07:03:06 crc kubenswrapper[4822]: E1201 07:03:06.134748 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\": container with ID starting with 50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca not found: ID does not exist" containerID="50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.134789 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"} err="failed to get container status \"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\": rpc error: code = NotFound desc = could not find container \"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\": container with ID starting with 50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.134819 4822 scope.go:117] "RemoveContainer" containerID="4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"
Dec 01 07:03:06 crc kubenswrapper[4822]: E1201 07:03:06.135113 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\": container with ID starting with 4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade not found: ID does not exist" containerID="4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.135150 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"} err="failed to get container status \"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\": rpc error: code = NotFound desc = could not find container \"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\": container with ID starting with 4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.135178 4822 scope.go:117] "RemoveContainer" containerID="3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"
Dec 01 07:03:06 crc kubenswrapper[4822]: E1201 07:03:06.137691 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\": container with ID starting with 3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50 not found: ID does not exist" containerID="3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.137733 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"} err="failed to get container status \"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\": rpc error: code = NotFound desc = could not find container \"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\": container with ID starting with 3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.137762 4822 scope.go:117] "RemoveContainer" containerID="60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"
Dec 01 07:03:06 crc kubenswrapper[4822]: E1201 07:03:06.138607 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\": container with ID starting with 60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef not found: ID does not exist" containerID="60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.138682 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"} err="failed to get container status \"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\": rpc error: code = NotFound desc = could not find container \"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\": container with ID starting with 60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.138724 4822 scope.go:117] "RemoveContainer" containerID="6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"
Dec 01 07:03:06 crc kubenswrapper[4822]: E1201 07:03:06.141292 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\": container with ID starting with 6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560 not found: ID does not exist" containerID="6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.141388 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"} err="failed to get container status \"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\": rpc error: code = NotFound desc = could not find container \"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\": container with ID starting with 6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.141482 4822 scope.go:117] "RemoveContainer" containerID="a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.142084 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"} err="failed to get container status \"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8\": rpc error: code = NotFound desc = could not find container \"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8\": container with ID starting with a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.142121 4822 scope.go:117] "RemoveContainer" containerID="3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.142567 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"} err="failed to get container status \"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\": rpc error: code = NotFound desc = could not find container \"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\": container with ID starting with 3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.142602 4822 scope.go:117] "RemoveContainer" containerID="e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.143009 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"} err="failed to get container status \"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\": rpc error: code = NotFound desc = could not find container \"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\": container with ID starting with e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.143039 4822 scope.go:117] "RemoveContainer" containerID="c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.143363 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"} err="failed to get container status \"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\": rpc error: code = NotFound desc = could not find container \"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\": container with ID starting with c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.143395 4822 scope.go:117] "RemoveContainer" containerID="2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.143968 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"} err="failed to get container status \"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\": rpc error: code = NotFound desc = could not find container \"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\": container with ID starting with 2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.144005 4822 scope.go:117] "RemoveContainer" containerID="50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.144306 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"} err="failed to get container status \"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\": rpc error: code = NotFound desc = could not find container \"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\": container with ID starting with 50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.144339 4822 scope.go:117] "RemoveContainer" containerID="4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.144660 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"} err="failed to get container status \"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\": rpc error: code = NotFound desc = could not find container \"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\": container with ID starting with 4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.144693 4822 scope.go:117] "RemoveContainer" containerID="3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.145213 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"} err="failed to get container status \"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\": rpc error: code = NotFound desc = could not find container \"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\": container with ID starting with 3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.145250 4822 scope.go:117] "RemoveContainer" containerID="60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.145810 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"} err="failed to get container status \"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\": rpc error: code = NotFound desc = could not find container \"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\": container with ID starting with 60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.145850 4822 scope.go:117] "RemoveContainer" containerID="6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.146339 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"} err="failed to get container status \"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\": rpc error: code = NotFound desc = could not find container \"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\": container with ID starting with 6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.146372 4822 scope.go:117] "RemoveContainer" containerID="a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.146825 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"} err="failed to get container status \"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8\": rpc error: code = NotFound desc = could not find container \"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8\": container with ID starting with a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.146859 4822 scope.go:117] "RemoveContainer" containerID="3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.147156 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"} err="failed to get container status \"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\": rpc error: code = NotFound desc = could not find container \"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\": container with ID starting with 3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.147189 4822 scope.go:117] "RemoveContainer" containerID="e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.147446 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"} err="failed to get container status \"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\": rpc error: code = NotFound desc = could not find container \"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\": container with ID starting with e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.147481 4822 scope.go:117] "RemoveContainer" containerID="c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.147805 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"} err="failed to get container status \"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\": rpc error: code = NotFound desc = could not find container \"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\": container with ID starting with c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.147849 4822 scope.go:117] "RemoveContainer" containerID="2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.148142 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"} err="failed to get container status \"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\": rpc error: code = NotFound desc = could not find container \"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\": container with ID starting with 2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.148175 4822 scope.go:117] "RemoveContainer" containerID="50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.148484 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"} err="failed to get container status \"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\": rpc error: code = NotFound desc = could not find container \"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\": container with ID starting with 50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.148517 4822 scope.go:117] "RemoveContainer" containerID="4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.148806 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"} err="failed to get container status \"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\": rpc error: code = NotFound desc = could not find container \"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\": container with ID starting with 4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.148838 4822 scope.go:117] "RemoveContainer" containerID="3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.149230 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"} err="failed to get container status \"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\": rpc error: code = NotFound desc = could not find container \"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\": container with ID starting with 3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.149265 4822 scope.go:117] "RemoveContainer" containerID="60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.149594 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"} err="failed to get container status \"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\": rpc error: code = NotFound desc = could not find container \"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\": container with ID starting with 60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.149624 4822 scope.go:117] "RemoveContainer" containerID="6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.149971 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"} err="failed to get container status \"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\": rpc error: code = NotFound desc = could not find container \"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\": container with ID starting with 6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.150007 4822 scope.go:117] "RemoveContainer" containerID="a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.150262 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8"} err="failed to get container status \"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8\": rpc error: code = NotFound desc = could not find container \"a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8\": container with ID starting with a3eaebd14e775062548d388e15eaca9b42d1076395074c95ae0d89e77cbcd3f8 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.150295 4822 scope.go:117] "RemoveContainer" containerID="3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.150592 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143"} err="failed to get container status \"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\": rpc error: code = NotFound desc = could not find container \"3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143\": container with ID starting with 3e776720b2dd9dcd3f5be339231b89d29431d7c0ec2270aeec1a6b27b2920143 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.150633 4822 scope.go:117] "RemoveContainer" containerID="e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.150851 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382"} err="failed to get container status \"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\": rpc error: code = NotFound desc = could not find container \"e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382\": container with ID starting with e68b7cb891ef977411e71224ef9e9381bb6c998d51a2a9c96c83560325caa382 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.150883 4822 scope.go:117] "RemoveContainer" containerID="c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.152038 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1"} err="failed to get container status \"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\": rpc error: code = NotFound desc = could not find container \"c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1\": container with ID starting with c47714cd8318f95cc36796366a1558b6fa9e3605210925520afcb3af12b261b1 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.152076 4822 scope.go:117] "RemoveContainer" containerID="2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.152316 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281"} err="failed to get container status \"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\": rpc error: code = NotFound desc = could not find container \"2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281\": container with ID starting with 2f426532d550514961e1fd373498ce42c6fe8e746c5fd16cee592c82469c4281 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.152351 4822 scope.go:117] "RemoveContainer" containerID="50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.152628 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca"} err="failed to get container status \"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\": rpc error: code = NotFound desc = could not find container \"50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca\": container with ID starting with 50e9c6483489eb4ad522fb58f9870123cf20417865f775f2665961ad465c86ca not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.152660 4822 scope.go:117] "RemoveContainer" containerID="4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.153008 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade"} err="failed to get container status \"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\": rpc error: code = NotFound desc = could not find container \"4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade\": container with ID starting with 4aaf7dc74d0e708663afd721c01fc02e87eef14305adbe54bbbe6223fe8bbade not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.153848 4822 scope.go:117] "RemoveContainer" containerID="3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.154469 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50"} err="failed to get container status \"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\": rpc error: code = NotFound desc = could not find container \"3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50\": container with ID starting with 3cac8956207330e802b77d0a0e7acde11f6520edcd32215d8e0aec94fb23bf50 not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.154536 4822 scope.go:117] "RemoveContainer" containerID="60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.154885 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef"} err="failed to get container status \"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\": rpc error: code = NotFound desc = could not find container \"60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef\": container with ID starting with 60e6a40cfebccdf9fac82d85be49a61dd6f353314e7a98ac25231b19bd00c6ef not found: ID does not exist"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.154921 4822 scope.go:117] "RemoveContainer" containerID="6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.155364 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560"} err="failed to get container status \"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\": rpc error: code = NotFound desc = could not find container \"6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560\": container with ID starting with 6c8f149b3730a1239b1f76d459ec97663e1ce9307a523111fe045d5f05c0d560 not found: ID does not exist"
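[The long RemoveContainer / NotFound cycle above is the kubelet re-submitting deletions for containers CRI-O has already purged: the first pass of scope.go RemoveContainer calls goes through, and every later pass fails with code = NotFound because those IDs no longer exist, so the errors are noisy but harmless. A sketch (the filename and regular expressions are assumptions based on the formats above) that tallies how often each ID draws a NotFound and whether the same log saw it die:

    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
    )

    func main() {
        f, err := os.Open("kubelet.log") // assumed local copy of this log
        if err != nil {
            panic(err)
        }
        defer f.Close()

        died := regexp.MustCompile(`"Type":"ContainerDied","Data":"([0-9a-f]{64})"`)
        notFound := regexp.MustCompile(`could not find container \\"([0-9a-f]{64})\\"`)

        diedIDs := map[string]bool{}
        mentions := map[string]int{} // ID -> NotFound mentions
        sc := bufio.NewScanner(f)
        sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
        for sc.Scan() {
            line := sc.Text()
            if m := died.FindStringSubmatch(line); m != nil {
                diedIDs[m[1]] = true
            }
            for _, m := range notFound.FindAllStringSubmatch(line, -1) {
                mentions[m[1]]++
            }
        }
        for id, n := range mentions {
            fmt.Printf("%s... %d NotFound mentions (ContainerDied seen in this log: %v)\n",
                id[:12], n, diedIDs[id])
        }
    }
]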
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.933269 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-b6tpr_34b58185-4742-4187-9243-860433c413d8/kube-multus/2.log"
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.933418 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-b6tpr" event={"ID":"34b58185-4742-4187-9243-860433c413d8","Type":"ContainerStarted","Data":"1100ca80ce10e3f78c64f117e07aa28c1e68e12f84618739e59510fc8185f969"}
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.937598 4822 generic.go:334] "Generic (PLEG): container finished" podID="d1375b56-b05b-484a-aa16-69f33b4c6c6c" containerID="e39173e5ca6c70c0fa5f094303962e3ce9f3b3d5ed9835189b51855353ea7048" exitCode=0
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.937688 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" event={"ID":"d1375b56-b05b-484a-aa16-69f33b4c6c6c","Type":"ContainerDied","Data":"e39173e5ca6c70c0fa5f094303962e3ce9f3b3d5ed9835189b51855353ea7048"}
Dec 01 07:03:06 crc kubenswrapper[4822]: I1201 07:03:06.965455 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8284d339-ff12-453a-be42-4540e44252ee" path="/var/lib/kubelet/pods/8284d339-ff12-453a-be42-4540e44252ee/volumes"
Dec 01 07:03:07 crc kubenswrapper[4822]: I1201 07:03:07.946449 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" event={"ID":"d1375b56-b05b-484a-aa16-69f33b4c6c6c","Type":"ContainerStarted","Data":"7da33a2612ce433801e0a7ea8fa4a288dd9e84a1e96d775f813520d6fb19c4fe"}
Dec 01 07:03:07 crc kubenswrapper[4822]: I1201 07:03:07.947440 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" event={"ID":"d1375b56-b05b-484a-aa16-69f33b4c6c6c","Type":"ContainerStarted","Data":"9685a8301ff47219f80000f5d29148a0f5ad9a013dce0fbc86ddaeced83f0e87"}
Dec 01 07:03:07 crc kubenswrapper[4822]: I1201 07:03:07.947456 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" event={"ID":"d1375b56-b05b-484a-aa16-69f33b4c6c6c","Type":"ContainerStarted","Data":"cc86f9dbf3c0f907ed90b68ef7ff3ed3059e79fdb7d4a2f63187f5ef4864917f"}
Dec 01 07:03:07 crc kubenswrapper[4822]: I1201 07:03:07.947465 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" event={"ID":"d1375b56-b05b-484a-aa16-69f33b4c6c6c","Type":"ContainerStarted","Data":"038cf0831c85cb1d18ab77a75164e8ac2332777bc97dbf80ad76a7aa68ee3532"}
Dec 01 07:03:07 crc kubenswrapper[4822]: I1201 07:03:07.947486 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" event={"ID":"d1375b56-b05b-484a-aa16-69f33b4c6c6c","Type":"ContainerStarted","Data":"4463ab489a8747a54957828e111204eb99f78539e547d084e54bd815bd01f278"}
Dec 01 07:03:07 crc kubenswrapper[4822]: I1201 07:03:07.947495 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" event={"ID":"d1375b56-b05b-484a-aa16-69f33b4c6c6c","Type":"ContainerStarted","Data":"6da93fb15a9b5b43b863cc1c7f69065ad25fcdb62e81e5907ba74525aed3052d"}
Dec 01 07:03:10 crc kubenswrapper[4822]: I1201 07:03:10.976022 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" event={"ID":"d1375b56-b05b-484a-aa16-69f33b4c6c6c","Type":"ContainerStarted","Data":"88d83195be9f13056dd80d0af1624b51e945461d77a7bfab2f3af2f0a7dd7c7c"}
Dec 01 07:03:12 crc kubenswrapper[4822]: I1201 07:03:12.543955 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 07:03:12 crc kubenswrapper[4822]: I1201 07:03:12.544504 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
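[The machine-config-daemon liveness failure above is a plain HTTP probe: the kubelet GETs http://127.0.0.1:8798/health and the connection is refused, meaning nothing was listening on that port at that moment. A self-contained approximation of the check; the timeout value is an assumption, and the real implementation lives in the kubelet's prober, which treats HTTP status 200-399 as success:

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        client := &http.Client{Timeout: 1 * time.Second} // assumed probe timeout
        resp, err := client.Get("http://127.0.0.1:8798/health")
        if err != nil {
            // The state captured in the log: connect: connection refused.
            fmt.Println("probe failure:", err)
            return
        }
        defer resp.Body.Close()
        if resp.StatusCode >= 200 && resp.StatusCode < 400 {
            fmt.Println("probe success:", resp.Status)
        } else {
            fmt.Println("probe failure:", resp.Status)
        }
    }
]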
I1201 07:03:14.001806 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:14 crc kubenswrapper[4822]: I1201 07:03:14.002041 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:14 crc kubenswrapper[4822]: I1201 07:03:14.002075 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:14 crc kubenswrapper[4822]: I1201 07:03:14.037664 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" podStartSLOduration=9.037633227 podStartE2EDuration="9.037633227s" podCreationTimestamp="2025-12-01 07:03:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:03:14.036742333 +0000 UTC m=+749.357550019" watchObservedRunningTime="2025-12-01 07:03:14.037633227 +0000 UTC m=+749.358440913" Dec 01 07:03:14 crc kubenswrapper[4822]: I1201 07:03:14.043230 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:14 crc kubenswrapper[4822]: I1201 07:03:14.048824 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.531136 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-xd6t4"] Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.532597 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.536369 4822 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-xwxqj" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.536985 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.536999 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.537381 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.545455 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-xd6t4"] Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.632019 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/049bc621-4bdc-4c58-9151-6506548287c1-crc-storage\") pod \"crc-storage-crc-xd6t4\" (UID: \"049bc621-4bdc-4c58-9151-6506548287c1\") " pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.632083 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/049bc621-4bdc-4c58-9151-6506548287c1-node-mnt\") pod \"crc-storage-crc-xd6t4\" (UID: \"049bc621-4bdc-4c58-9151-6506548287c1\") " pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.632130 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-d89tf\" (UniqueName: \"kubernetes.io/projected/049bc621-4bdc-4c58-9151-6506548287c1-kube-api-access-d89tf\") pod \"crc-storage-crc-xd6t4\" (UID: \"049bc621-4bdc-4c58-9151-6506548287c1\") " pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.733428 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/049bc621-4bdc-4c58-9151-6506548287c1-node-mnt\") pod \"crc-storage-crc-xd6t4\" (UID: \"049bc621-4bdc-4c58-9151-6506548287c1\") " pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.733492 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d89tf\" (UniqueName: \"kubernetes.io/projected/049bc621-4bdc-4c58-9151-6506548287c1-kube-api-access-d89tf\") pod \"crc-storage-crc-xd6t4\" (UID: \"049bc621-4bdc-4c58-9151-6506548287c1\") " pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.733607 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/049bc621-4bdc-4c58-9151-6506548287c1-crc-storage\") pod \"crc-storage-crc-xd6t4\" (UID: \"049bc621-4bdc-4c58-9151-6506548287c1\") " pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.734006 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/049bc621-4bdc-4c58-9151-6506548287c1-node-mnt\") pod \"crc-storage-crc-xd6t4\" (UID: \"049bc621-4bdc-4c58-9151-6506548287c1\") " pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.735258 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/049bc621-4bdc-4c58-9151-6506548287c1-crc-storage\") pod \"crc-storage-crc-xd6t4\" (UID: \"049bc621-4bdc-4c58-9151-6506548287c1\") " pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.763434 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d89tf\" (UniqueName: \"kubernetes.io/projected/049bc621-4bdc-4c58-9151-6506548287c1-kube-api-access-d89tf\") pod \"crc-storage-crc-xd6t4\" (UID: \"049bc621-4bdc-4c58-9151-6506548287c1\") " pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: I1201 07:03:15.867396 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: E1201 07:03:15.911991 4822 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xd6t4_crc-storage_049bc621-4bdc-4c58-9151-6506548287c1_0(a57acfe1c9a527d33b738fcbf216d8aae581486c6d978c04aeca5b9bfcb26a27): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 07:03:15 crc kubenswrapper[4822]: E1201 07:03:15.912114 4822 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xd6t4_crc-storage_049bc621-4bdc-4c58-9151-6506548287c1_0(a57acfe1c9a527d33b738fcbf216d8aae581486c6d978c04aeca5b9bfcb26a27): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: E1201 07:03:15.912166 4822 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xd6t4_crc-storage_049bc621-4bdc-4c58-9151-6506548287c1_0(a57acfe1c9a527d33b738fcbf216d8aae581486c6d978c04aeca5b9bfcb26a27): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:15 crc kubenswrapper[4822]: E1201 07:03:15.912269 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-xd6t4_crc-storage(049bc621-4bdc-4c58-9151-6506548287c1)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-xd6t4_crc-storage(049bc621-4bdc-4c58-9151-6506548287c1)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xd6t4_crc-storage_049bc621-4bdc-4c58-9151-6506548287c1_0(a57acfe1c9a527d33b738fcbf216d8aae581486c6d978c04aeca5b9bfcb26a27): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-xd6t4" podUID="049bc621-4bdc-4c58-9151-6506548287c1" Dec 01 07:03:16 crc kubenswrapper[4822]: I1201 07:03:16.014168 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:16 crc kubenswrapper[4822]: I1201 07:03:16.015801 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:16 crc kubenswrapper[4822]: E1201 07:03:16.069302 4822 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xd6t4_crc-storage_049bc621-4bdc-4c58-9151-6506548287c1_0(3daffe045ad0921e1e07271ce721ae4c48349da9b1503793d88797cf92dd3f94): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 07:03:16 crc kubenswrapper[4822]: E1201 07:03:16.069373 4822 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xd6t4_crc-storage_049bc621-4bdc-4c58-9151-6506548287c1_0(3daffe045ad0921e1e07271ce721ae4c48349da9b1503793d88797cf92dd3f94): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:16 crc kubenswrapper[4822]: E1201 07:03:16.069399 4822 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xd6t4_crc-storage_049bc621-4bdc-4c58-9151-6506548287c1_0(3daffe045ad0921e1e07271ce721ae4c48349da9b1503793d88797cf92dd3f94): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:16 crc kubenswrapper[4822]: E1201 07:03:16.069454 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-xd6t4_crc-storage(049bc621-4bdc-4c58-9151-6506548287c1)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-xd6t4_crc-storage(049bc621-4bdc-4c58-9151-6506548287c1)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xd6t4_crc-storage_049bc621-4bdc-4c58-9151-6506548287c1_0(3daffe045ad0921e1e07271ce721ae4c48349da9b1503793d88797cf92dd3f94): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-xd6t4" podUID="049bc621-4bdc-4c58-9151-6506548287c1" Dec 01 07:03:16 crc kubenswrapper[4822]: I1201 07:03:16.252665 4822 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 01 07:03:27 crc kubenswrapper[4822]: I1201 07:03:27.950507 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:27 crc kubenswrapper[4822]: I1201 07:03:27.952603 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:28 crc kubenswrapper[4822]: I1201 07:03:28.190256 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-xd6t4"] Dec 01 07:03:28 crc kubenswrapper[4822]: I1201 07:03:28.206750 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:03:29 crc kubenswrapper[4822]: I1201 07:03:29.122917 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xd6t4" event={"ID":"049bc621-4bdc-4c58-9151-6506548287c1","Type":"ContainerStarted","Data":"456d65b74943ac8a159bd8a30595ae33fb42498228dc05993129049987939332"} Dec 01 07:03:30 crc kubenswrapper[4822]: I1201 07:03:30.132268 4822 generic.go:334] "Generic (PLEG): container finished" podID="049bc621-4bdc-4c58-9151-6506548287c1" containerID="4aa300e393bbab05fdf787c7f6c70c5f66f11f0765b53911efc5d0f0267cd733" exitCode=0 Dec 01 07:03:30 crc kubenswrapper[4822]: I1201 07:03:30.132368 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xd6t4" event={"ID":"049bc621-4bdc-4c58-9151-6506548287c1","Type":"ContainerDied","Data":"4aa300e393bbab05fdf787c7f6c70c5f66f11f0765b53911efc5d0f0267cd733"} Dec 01 07:03:31 crc kubenswrapper[4822]: I1201 07:03:31.378446 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:31 crc kubenswrapper[4822]: I1201 07:03:31.453278 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/049bc621-4bdc-4c58-9151-6506548287c1-node-mnt\") pod \"049bc621-4bdc-4c58-9151-6506548287c1\" (UID: \"049bc621-4bdc-4c58-9151-6506548287c1\") " Dec 01 07:03:31 crc kubenswrapper[4822]: I1201 07:03:31.453389 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/049bc621-4bdc-4c58-9151-6506548287c1-crc-storage\") pod \"049bc621-4bdc-4c58-9151-6506548287c1\" (UID: \"049bc621-4bdc-4c58-9151-6506548287c1\") " Dec 01 07:03:31 crc kubenswrapper[4822]: I1201 07:03:31.453422 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/049bc621-4bdc-4c58-9151-6506548287c1-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "049bc621-4bdc-4c58-9151-6506548287c1" (UID: "049bc621-4bdc-4c58-9151-6506548287c1"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:03:31 crc kubenswrapper[4822]: I1201 07:03:31.453570 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d89tf\" (UniqueName: \"kubernetes.io/projected/049bc621-4bdc-4c58-9151-6506548287c1-kube-api-access-d89tf\") pod \"049bc621-4bdc-4c58-9151-6506548287c1\" (UID: \"049bc621-4bdc-4c58-9151-6506548287c1\") " Dec 01 07:03:31 crc kubenswrapper[4822]: I1201 07:03:31.453930 4822 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/049bc621-4bdc-4c58-9151-6506548287c1-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:31 crc kubenswrapper[4822]: I1201 07:03:31.458891 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/049bc621-4bdc-4c58-9151-6506548287c1-kube-api-access-d89tf" (OuterVolumeSpecName: "kube-api-access-d89tf") pod "049bc621-4bdc-4c58-9151-6506548287c1" (UID: "049bc621-4bdc-4c58-9151-6506548287c1"). InnerVolumeSpecName "kube-api-access-d89tf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:03:31 crc kubenswrapper[4822]: I1201 07:03:31.466022 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/049bc621-4bdc-4c58-9151-6506548287c1-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "049bc621-4bdc-4c58-9151-6506548287c1" (UID: "049bc621-4bdc-4c58-9151-6506548287c1"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:03:31 crc kubenswrapper[4822]: I1201 07:03:31.555697 4822 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/049bc621-4bdc-4c58-9151-6506548287c1-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:31 crc kubenswrapper[4822]: I1201 07:03:31.555740 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d89tf\" (UniqueName: \"kubernetes.io/projected/049bc621-4bdc-4c58-9151-6506548287c1-kube-api-access-d89tf\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:32 crc kubenswrapper[4822]: I1201 07:03:32.148875 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xd6t4" event={"ID":"049bc621-4bdc-4c58-9151-6506548287c1","Type":"ContainerDied","Data":"456d65b74943ac8a159bd8a30595ae33fb42498228dc05993129049987939332"} Dec 01 07:03:32 crc kubenswrapper[4822]: I1201 07:03:32.148944 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="456d65b74943ac8a159bd8a30595ae33fb42498228dc05993129049987939332" Dec 01 07:03:32 crc kubenswrapper[4822]: I1201 07:03:32.148963 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xd6t4" Dec 01 07:03:35 crc kubenswrapper[4822]: I1201 07:03:35.788302 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-bgbks" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.631337 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf"] Dec 01 07:03:38 crc kubenswrapper[4822]: E1201 07:03:38.631591 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="049bc621-4bdc-4c58-9151-6506548287c1" containerName="storage" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.631608 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="049bc621-4bdc-4c58-9151-6506548287c1" containerName="storage" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.631736 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="049bc621-4bdc-4c58-9151-6506548287c1" containerName="storage" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.632651 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.635281 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.647058 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf"] Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.690939 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58wx6\" (UniqueName: \"kubernetes.io/projected/54b66714-bfae-4121-932c-e03181665394-kube-api-access-58wx6\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf\" (UID: \"54b66714-bfae-4121-932c-e03181665394\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.691043 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/54b66714-bfae-4121-932c-e03181665394-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf\" (UID: \"54b66714-bfae-4121-932c-e03181665394\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.691126 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/54b66714-bfae-4121-932c-e03181665394-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf\" (UID: \"54b66714-bfae-4121-932c-e03181665394\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.792909 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/54b66714-bfae-4121-932c-e03181665394-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf\" (UID: \"54b66714-bfae-4121-932c-e03181665394\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.793036 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58wx6\" (UniqueName: \"kubernetes.io/projected/54b66714-bfae-4121-932c-e03181665394-kube-api-access-58wx6\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf\" (UID: \"54b66714-bfae-4121-932c-e03181665394\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.793084 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/54b66714-bfae-4121-932c-e03181665394-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf\" (UID: \"54b66714-bfae-4121-932c-e03181665394\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.793503 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/54b66714-bfae-4121-932c-e03181665394-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf\" (UID: \"54b66714-bfae-4121-932c-e03181665394\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.793747 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/54b66714-bfae-4121-932c-e03181665394-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf\" (UID: \"54b66714-bfae-4121-932c-e03181665394\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.813608 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58wx6\" (UniqueName: \"kubernetes.io/projected/54b66714-bfae-4121-932c-e03181665394-kube-api-access-58wx6\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf\" (UID: \"54b66714-bfae-4121-932c-e03181665394\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:38 crc kubenswrapper[4822]: I1201 07:03:38.954130 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:39 crc kubenswrapper[4822]: I1201 07:03:39.888450 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf"] Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.139313 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rwvr6"] Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.141422 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.154101 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rwvr6"] Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.202440 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" event={"ID":"54b66714-bfae-4121-932c-e03181665394","Type":"ContainerStarted","Data":"c1252802450797611bec0bcc1a239f822f1e9e8a5a4e786c4dfdb42e8f933dac"} Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.202498 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" event={"ID":"54b66714-bfae-4121-932c-e03181665394","Type":"ContainerStarted","Data":"5f43b4cfac5238ba083ef1b8425c4706a573c0447c50f270ac3b14b1e330dc11"} Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.212345 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0acc0cd9-c969-4825-9346-6d6455d582f6-utilities\") pod \"redhat-operators-rwvr6\" (UID: \"0acc0cd9-c969-4825-9346-6d6455d582f6\") " pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.212424 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rbff\" (UniqueName: \"kubernetes.io/projected/0acc0cd9-c969-4825-9346-6d6455d582f6-kube-api-access-7rbff\") pod \"redhat-operators-rwvr6\" (UID: \"0acc0cd9-c969-4825-9346-6d6455d582f6\") " pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.212445 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0acc0cd9-c969-4825-9346-6d6455d582f6-catalog-content\") pod \"redhat-operators-rwvr6\" (UID: \"0acc0cd9-c969-4825-9346-6d6455d582f6\") " pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.313580 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0acc0cd9-c969-4825-9346-6d6455d582f6-catalog-content\") pod \"redhat-operators-rwvr6\" (UID: \"0acc0cd9-c969-4825-9346-6d6455d582f6\") " pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.313709 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0acc0cd9-c969-4825-9346-6d6455d582f6-utilities\") pod \"redhat-operators-rwvr6\" (UID: \"0acc0cd9-c969-4825-9346-6d6455d582f6\") " pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.313772 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rbff\" (UniqueName: \"kubernetes.io/projected/0acc0cd9-c969-4825-9346-6d6455d582f6-kube-api-access-7rbff\") pod \"redhat-operators-rwvr6\" (UID: \"0acc0cd9-c969-4825-9346-6d6455d582f6\") " pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.314111 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/0acc0cd9-c969-4825-9346-6d6455d582f6-catalog-content\") pod \"redhat-operators-rwvr6\" (UID: \"0acc0cd9-c969-4825-9346-6d6455d582f6\") " pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.314300 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0acc0cd9-c969-4825-9346-6d6455d582f6-utilities\") pod \"redhat-operators-rwvr6\" (UID: \"0acc0cd9-c969-4825-9346-6d6455d582f6\") " pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.341764 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rbff\" (UniqueName: \"kubernetes.io/projected/0acc0cd9-c969-4825-9346-6d6455d582f6-kube-api-access-7rbff\") pod \"redhat-operators-rwvr6\" (UID: \"0acc0cd9-c969-4825-9346-6d6455d582f6\") " pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.468588 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:40 crc kubenswrapper[4822]: I1201 07:03:40.709565 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rwvr6"] Dec 01 07:03:40 crc kubenswrapper[4822]: W1201 07:03:40.718739 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0acc0cd9_c969_4825_9346_6d6455d582f6.slice/crio-4201af4778c634ac16792e469ec3761ba2f400e1719569aaa5b3afd702c51888 WatchSource:0}: Error finding container 4201af4778c634ac16792e469ec3761ba2f400e1719569aaa5b3afd702c51888: Status 404 returned error can't find the container with id 4201af4778c634ac16792e469ec3761ba2f400e1719569aaa5b3afd702c51888 Dec 01 07:03:41 crc kubenswrapper[4822]: I1201 07:03:41.208072 4822 generic.go:334] "Generic (PLEG): container finished" podID="54b66714-bfae-4121-932c-e03181665394" containerID="c1252802450797611bec0bcc1a239f822f1e9e8a5a4e786c4dfdb42e8f933dac" exitCode=0 Dec 01 07:03:41 crc kubenswrapper[4822]: I1201 07:03:41.208401 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" event={"ID":"54b66714-bfae-4121-932c-e03181665394","Type":"ContainerDied","Data":"c1252802450797611bec0bcc1a239f822f1e9e8a5a4e786c4dfdb42e8f933dac"} Dec 01 07:03:41 crc kubenswrapper[4822]: I1201 07:03:41.214041 4822 generic.go:334] "Generic (PLEG): container finished" podID="0acc0cd9-c969-4825-9346-6d6455d582f6" containerID="a8a3b555c56b915d342cf8cbefb4c20428bd50b885f169271da2590f87d40f96" exitCode=0 Dec 01 07:03:41 crc kubenswrapper[4822]: I1201 07:03:41.214087 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rwvr6" event={"ID":"0acc0cd9-c969-4825-9346-6d6455d582f6","Type":"ContainerDied","Data":"a8a3b555c56b915d342cf8cbefb4c20428bd50b885f169271da2590f87d40f96"} Dec 01 07:03:41 crc kubenswrapper[4822]: I1201 07:03:41.214117 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rwvr6" event={"ID":"0acc0cd9-c969-4825-9346-6d6455d582f6","Type":"ContainerStarted","Data":"4201af4778c634ac16792e469ec3761ba2f400e1719569aaa5b3afd702c51888"} Dec 01 07:03:42 crc kubenswrapper[4822]: I1201 07:03:42.543383 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:03:42 crc kubenswrapper[4822]: I1201 07:03:42.543483 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:03:42 crc kubenswrapper[4822]: I1201 07:03:42.543598 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 07:03:42 crc kubenswrapper[4822]: I1201 07:03:42.544373 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"338b930b0a920daac3d47f96c28535256b673fb917325f97766ffd05e922ff93"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:03:42 crc kubenswrapper[4822]: I1201 07:03:42.544452 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://338b930b0a920daac3d47f96c28535256b673fb917325f97766ffd05e922ff93" gracePeriod=600 Dec 01 07:03:43 crc kubenswrapper[4822]: I1201 07:03:43.234413 4822 generic.go:334] "Generic (PLEG): container finished" podID="54b66714-bfae-4121-932c-e03181665394" containerID="fc334dfddfecd3ce058204b1ac9e972b9917d37dedccfc83250ceffd6be8f9ab" exitCode=0 Dec 01 07:03:43 crc kubenswrapper[4822]: I1201 07:03:43.234515 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" event={"ID":"54b66714-bfae-4121-932c-e03181665394","Type":"ContainerDied","Data":"fc334dfddfecd3ce058204b1ac9e972b9917d37dedccfc83250ceffd6be8f9ab"} Dec 01 07:03:43 crc kubenswrapper[4822]: I1201 07:03:43.240205 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="338b930b0a920daac3d47f96c28535256b673fb917325f97766ffd05e922ff93" exitCode=0 Dec 01 07:03:43 crc kubenswrapper[4822]: I1201 07:03:43.240279 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"338b930b0a920daac3d47f96c28535256b673fb917325f97766ffd05e922ff93"} Dec 01 07:03:43 crc kubenswrapper[4822]: I1201 07:03:43.240345 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"6baf3d5e41a621d0e59cbb384ffe06f0de93d5916d8dc51ecf89a3a235ed2c54"} Dec 01 07:03:43 crc kubenswrapper[4822]: I1201 07:03:43.240375 4822 scope.go:117] "RemoveContainer" containerID="3bc601827ebd06a71930384d1ac078a53dc216ae11dbb095771eb71fc08f11e8" Dec 01 07:03:43 crc kubenswrapper[4822]: I1201 07:03:43.244843 4822 generic.go:334] "Generic (PLEG): container finished" podID="0acc0cd9-c969-4825-9346-6d6455d582f6" 
containerID="f88966dbc9bb93e08e6aec38f12210e2adb15008ed784eb8600f9a222578348a" exitCode=0 Dec 01 07:03:43 crc kubenswrapper[4822]: I1201 07:03:43.244920 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rwvr6" event={"ID":"0acc0cd9-c969-4825-9346-6d6455d582f6","Type":"ContainerDied","Data":"f88966dbc9bb93e08e6aec38f12210e2adb15008ed784eb8600f9a222578348a"} Dec 01 07:03:44 crc kubenswrapper[4822]: I1201 07:03:44.258132 4822 generic.go:334] "Generic (PLEG): container finished" podID="54b66714-bfae-4121-932c-e03181665394" containerID="195be320b107a9b815db292d5bd096c3c1b04ec9b0daea7e326b3c25a92e4f22" exitCode=0 Dec 01 07:03:44 crc kubenswrapper[4822]: I1201 07:03:44.258225 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" event={"ID":"54b66714-bfae-4121-932c-e03181665394","Type":"ContainerDied","Data":"195be320b107a9b815db292d5bd096c3c1b04ec9b0daea7e326b3c25a92e4f22"} Dec 01 07:03:44 crc kubenswrapper[4822]: I1201 07:03:44.269413 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rwvr6" event={"ID":"0acc0cd9-c969-4825-9346-6d6455d582f6","Type":"ContainerStarted","Data":"70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1"} Dec 01 07:03:44 crc kubenswrapper[4822]: I1201 07:03:44.314736 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rwvr6" podStartSLOduration=1.516530125 podStartE2EDuration="4.314712639s" podCreationTimestamp="2025-12-01 07:03:40 +0000 UTC" firstStartedPulling="2025-12-01 07:03:41.217011172 +0000 UTC m=+776.537818858" lastFinishedPulling="2025-12-01 07:03:44.015193686 +0000 UTC m=+779.336001372" observedRunningTime="2025-12-01 07:03:44.30897459 +0000 UTC m=+779.629782286" watchObservedRunningTime="2025-12-01 07:03:44.314712639 +0000 UTC m=+779.635520365" Dec 01 07:03:45 crc kubenswrapper[4822]: I1201 07:03:45.514400 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:45 crc kubenswrapper[4822]: I1201 07:03:45.591488 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/54b66714-bfae-4121-932c-e03181665394-bundle\") pod \"54b66714-bfae-4121-932c-e03181665394\" (UID: \"54b66714-bfae-4121-932c-e03181665394\") " Dec 01 07:03:45 crc kubenswrapper[4822]: I1201 07:03:45.591560 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58wx6\" (UniqueName: \"kubernetes.io/projected/54b66714-bfae-4121-932c-e03181665394-kube-api-access-58wx6\") pod \"54b66714-bfae-4121-932c-e03181665394\" (UID: \"54b66714-bfae-4121-932c-e03181665394\") " Dec 01 07:03:45 crc kubenswrapper[4822]: I1201 07:03:45.591615 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/54b66714-bfae-4121-932c-e03181665394-util\") pod \"54b66714-bfae-4121-932c-e03181665394\" (UID: \"54b66714-bfae-4121-932c-e03181665394\") " Dec 01 07:03:45 crc kubenswrapper[4822]: I1201 07:03:45.592666 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54b66714-bfae-4121-932c-e03181665394-bundle" (OuterVolumeSpecName: "bundle") pod "54b66714-bfae-4121-932c-e03181665394" (UID: "54b66714-bfae-4121-932c-e03181665394"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:03:45 crc kubenswrapper[4822]: I1201 07:03:45.598390 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54b66714-bfae-4121-932c-e03181665394-kube-api-access-58wx6" (OuterVolumeSpecName: "kube-api-access-58wx6") pod "54b66714-bfae-4121-932c-e03181665394" (UID: "54b66714-bfae-4121-932c-e03181665394"). InnerVolumeSpecName "kube-api-access-58wx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:03:45 crc kubenswrapper[4822]: I1201 07:03:45.602301 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54b66714-bfae-4121-932c-e03181665394-util" (OuterVolumeSpecName: "util") pod "54b66714-bfae-4121-932c-e03181665394" (UID: "54b66714-bfae-4121-932c-e03181665394"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:03:45 crc kubenswrapper[4822]: I1201 07:03:45.692629 4822 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/54b66714-bfae-4121-932c-e03181665394-util\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:45 crc kubenswrapper[4822]: I1201 07:03:45.692661 4822 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/54b66714-bfae-4121-932c-e03181665394-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:45 crc kubenswrapper[4822]: I1201 07:03:45.692671 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58wx6\" (UniqueName: \"kubernetes.io/projected/54b66714-bfae-4121-932c-e03181665394-kube-api-access-58wx6\") on node \"crc\" DevicePath \"\"" Dec 01 07:03:46 crc kubenswrapper[4822]: I1201 07:03:46.287059 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" event={"ID":"54b66714-bfae-4121-932c-e03181665394","Type":"ContainerDied","Data":"5f43b4cfac5238ba083ef1b8425c4706a573c0447c50f270ac3b14b1e330dc11"} Dec 01 07:03:46 crc kubenswrapper[4822]: I1201 07:03:46.287402 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f43b4cfac5238ba083ef1b8425c4706a573c0447c50f270ac3b14b1e330dc11" Dec 01 07:03:46 crc kubenswrapper[4822]: I1201 07:03:46.287087 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.083299 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-dnlrq"] Dec 01 07:03:50 crc kubenswrapper[4822]: E1201 07:03:50.084104 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54b66714-bfae-4121-932c-e03181665394" containerName="pull" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.084121 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="54b66714-bfae-4121-932c-e03181665394" containerName="pull" Dec 01 07:03:50 crc kubenswrapper[4822]: E1201 07:03:50.084134 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54b66714-bfae-4121-932c-e03181665394" containerName="util" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.084141 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="54b66714-bfae-4121-932c-e03181665394" containerName="util" Dec 01 07:03:50 crc kubenswrapper[4822]: E1201 07:03:50.084165 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54b66714-bfae-4121-932c-e03181665394" containerName="extract" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.084173 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="54b66714-bfae-4121-932c-e03181665394" containerName="extract" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.084270 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="54b66714-bfae-4121-932c-e03181665394" containerName="extract" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.084818 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-dnlrq" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.087984 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.088214 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.090219 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-gwz6t" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.104386 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-dnlrq"] Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.155035 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xp2ck\" (UniqueName: \"kubernetes.io/projected/5869a4b0-9484-44bc-b62e-6e1a48bf87f6-kube-api-access-xp2ck\") pod \"nmstate-operator-5b5b58f5c8-dnlrq\" (UID: \"5869a4b0-9484-44bc-b62e-6e1a48bf87f6\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-dnlrq" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.256121 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xp2ck\" (UniqueName: \"kubernetes.io/projected/5869a4b0-9484-44bc-b62e-6e1a48bf87f6-kube-api-access-xp2ck\") pod \"nmstate-operator-5b5b58f5c8-dnlrq\" (UID: \"5869a4b0-9484-44bc-b62e-6e1a48bf87f6\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-dnlrq" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.278381 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xp2ck\" (UniqueName: \"kubernetes.io/projected/5869a4b0-9484-44bc-b62e-6e1a48bf87f6-kube-api-access-xp2ck\") pod \"nmstate-operator-5b5b58f5c8-dnlrq\" (UID: \"5869a4b0-9484-44bc-b62e-6e1a48bf87f6\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-dnlrq" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.404508 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-dnlrq" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.469101 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.469190 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:03:50 crc kubenswrapper[4822]: I1201 07:03:50.659317 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-dnlrq"] Dec 01 07:03:51 crc kubenswrapper[4822]: I1201 07:03:51.321379 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-dnlrq" event={"ID":"5869a4b0-9484-44bc-b62e-6e1a48bf87f6","Type":"ContainerStarted","Data":"12ebb590464f5ea01f1728a548ccbf6d6bc6860535cd939aa636c0e36df64c0f"} Dec 01 07:03:51 crc kubenswrapper[4822]: I1201 07:03:51.522777 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rwvr6" podUID="0acc0cd9-c969-4825-9346-6d6455d582f6" containerName="registry-server" probeResult="failure" output=< Dec 01 07:03:51 crc kubenswrapper[4822]: timeout: failed to connect service ":50051" within 1s Dec 01 07:03:51 crc kubenswrapper[4822]: > Dec 01 07:03:53 crc kubenswrapper[4822]: I1201 07:03:53.337711 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-dnlrq" event={"ID":"5869a4b0-9484-44bc-b62e-6e1a48bf87f6","Type":"ContainerStarted","Data":"e1c4d5f98d70d44152bc94bf0c4e9d10d65e2e52204e5a168ae191a3e826fc39"} Dec 01 07:03:53 crc kubenswrapper[4822]: I1201 07:03:53.360498 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-dnlrq" podStartSLOduration=1.299475496 podStartE2EDuration="3.3604805s" podCreationTimestamp="2025-12-01 07:03:50 +0000 UTC" firstStartedPulling="2025-12-01 07:03:50.673899558 +0000 UTC m=+785.994707244" lastFinishedPulling="2025-12-01 07:03:52.734904562 +0000 UTC m=+788.055712248" observedRunningTime="2025-12-01 07:03:53.360148821 +0000 UTC m=+788.680956547" watchObservedRunningTime="2025-12-01 07:03:53.3604805 +0000 UTC m=+788.681288196" Dec 01 07:03:58 crc kubenswrapper[4822]: I1201 07:03:58.899259 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-kkt2f"] Dec 01 07:03:58 crc kubenswrapper[4822]: I1201 07:03:58.900578 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kkt2f" Dec 01 07:03:58 crc kubenswrapper[4822]: I1201 07:03:58.906799 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-7dstm" Dec 01 07:03:58 crc kubenswrapper[4822]: I1201 07:03:58.937079 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp"] Dec 01 07:03:58 crc kubenswrapper[4822]: I1201 07:03:58.938094 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" Dec 01 07:03:58 crc kubenswrapper[4822]: I1201 07:03:58.939891 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 01 07:03:58 crc kubenswrapper[4822]: I1201 07:03:58.958932 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp"] Dec 01 07:03:58 crc kubenswrapper[4822]: I1201 07:03:58.973751 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-kkt2f"] Dec 01 07:03:58 crc kubenswrapper[4822]: I1201 07:03:58.984478 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-cd8pn"] Dec 01 07:03:58 crc kubenswrapper[4822]: I1201 07:03:58.985299 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-cd8pn" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.000879 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjpzs\" (UniqueName: \"kubernetes.io/projected/6cf74d74-242e-44f7-a881-01ae90b1c7be-kube-api-access-kjpzs\") pod \"nmstate-webhook-5f6d4c5ccb-nwxmp\" (UID: \"6cf74d74-242e-44f7-a881-01ae90b1c7be\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.000926 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnt5g\" (UniqueName: \"kubernetes.io/projected/005acf82-ce0c-4762-b26d-b16df2767b45-kube-api-access-hnt5g\") pod \"nmstate-metrics-7f946cbc9-kkt2f\" (UID: \"005acf82-ce0c-4762-b26d-b16df2767b45\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kkt2f" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.001044 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/6cf74d74-242e-44f7-a881-01ae90b1c7be-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-nwxmp\" (UID: \"6cf74d74-242e-44f7-a881-01ae90b1c7be\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.045914 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm"] Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.046752 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.053123 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-fjwpg" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.054964 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.054987 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.063573 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm"] Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.102738 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn2t2\" (UniqueName: \"kubernetes.io/projected/fea47134-e547-45db-8461-8b4154f25225-kube-api-access-nn2t2\") pod \"nmstate-handler-cd8pn\" (UID: \"fea47134-e547-45db-8461-8b4154f25225\") " pod="openshift-nmstate/nmstate-handler-cd8pn" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.102795 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjpzs\" (UniqueName: \"kubernetes.io/projected/6cf74d74-242e-44f7-a881-01ae90b1c7be-kube-api-access-kjpzs\") pod \"nmstate-webhook-5f6d4c5ccb-nwxmp\" (UID: \"6cf74d74-242e-44f7-a881-01ae90b1c7be\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.102815 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf5665f-7be9-4d4e-a750-611d44124963-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-v5nnm\" (UID: \"3bf5665f-7be9-4d4e-a750-611d44124963\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.102831 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bhpr\" (UniqueName: \"kubernetes.io/projected/3bf5665f-7be9-4d4e-a750-611d44124963-kube-api-access-5bhpr\") pod \"nmstate-console-plugin-7fbb5f6569-v5nnm\" (UID: \"3bf5665f-7be9-4d4e-a750-611d44124963\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.102973 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnt5g\" (UniqueName: \"kubernetes.io/projected/005acf82-ce0c-4762-b26d-b16df2767b45-kube-api-access-hnt5g\") pod \"nmstate-metrics-7f946cbc9-kkt2f\" (UID: \"005acf82-ce0c-4762-b26d-b16df2767b45\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kkt2f" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.103039 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/3bf5665f-7be9-4d4e-a750-611d44124963-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-v5nnm\" (UID: \"3bf5665f-7be9-4d4e-a750-611d44124963\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.103089 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" 
(UniqueName: \"kubernetes.io/host-path/fea47134-e547-45db-8461-8b4154f25225-nmstate-lock\") pod \"nmstate-handler-cd8pn\" (UID: \"fea47134-e547-45db-8461-8b4154f25225\") " pod="openshift-nmstate/nmstate-handler-cd8pn" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.103162 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/fea47134-e547-45db-8461-8b4154f25225-dbus-socket\") pod \"nmstate-handler-cd8pn\" (UID: \"fea47134-e547-45db-8461-8b4154f25225\") " pod="openshift-nmstate/nmstate-handler-cd8pn" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.103264 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/6cf74d74-242e-44f7-a881-01ae90b1c7be-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-nwxmp\" (UID: \"6cf74d74-242e-44f7-a881-01ae90b1c7be\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.103290 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/fea47134-e547-45db-8461-8b4154f25225-ovs-socket\") pod \"nmstate-handler-cd8pn\" (UID: \"fea47134-e547-45db-8461-8b4154f25225\") " pod="openshift-nmstate/nmstate-handler-cd8pn" Dec 01 07:03:59 crc kubenswrapper[4822]: E1201 07:03:59.103446 4822 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 01 07:03:59 crc kubenswrapper[4822]: E1201 07:03:59.103506 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6cf74d74-242e-44f7-a881-01ae90b1c7be-tls-key-pair podName:6cf74d74-242e-44f7-a881-01ae90b1c7be nodeName:}" failed. No retries permitted until 2025-12-01 07:03:59.603485884 +0000 UTC m=+794.924293570 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/6cf74d74-242e-44f7-a881-01ae90b1c7be-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-nwxmp" (UID: "6cf74d74-242e-44f7-a881-01ae90b1c7be") : secret "openshift-nmstate-webhook" not found Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.122996 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjpzs\" (UniqueName: \"kubernetes.io/projected/6cf74d74-242e-44f7-a881-01ae90b1c7be-kube-api-access-kjpzs\") pod \"nmstate-webhook-5f6d4c5ccb-nwxmp\" (UID: \"6cf74d74-242e-44f7-a881-01ae90b1c7be\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.129324 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnt5g\" (UniqueName: \"kubernetes.io/projected/005acf82-ce0c-4762-b26d-b16df2767b45-kube-api-access-hnt5g\") pod \"nmstate-metrics-7f946cbc9-kkt2f\" (UID: \"005acf82-ce0c-4762-b26d-b16df2767b45\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kkt2f" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.205173 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/3bf5665f-7be9-4d4e-a750-611d44124963-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-v5nnm\" (UID: \"3bf5665f-7be9-4d4e-a750-611d44124963\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.205251 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/fea47134-e547-45db-8461-8b4154f25225-nmstate-lock\") pod \"nmstate-handler-cd8pn\" (UID: \"fea47134-e547-45db-8461-8b4154f25225\") " pod="openshift-nmstate/nmstate-handler-cd8pn" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.205302 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/fea47134-e547-45db-8461-8b4154f25225-dbus-socket\") pod \"nmstate-handler-cd8pn\" (UID: \"fea47134-e547-45db-8461-8b4154f25225\") " pod="openshift-nmstate/nmstate-handler-cd8pn" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.205369 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/fea47134-e547-45db-8461-8b4154f25225-ovs-socket\") pod \"nmstate-handler-cd8pn\" (UID: \"fea47134-e547-45db-8461-8b4154f25225\") " pod="openshift-nmstate/nmstate-handler-cd8pn" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.205416 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn2t2\" (UniqueName: \"kubernetes.io/projected/fea47134-e547-45db-8461-8b4154f25225-kube-api-access-nn2t2\") pod \"nmstate-handler-cd8pn\" (UID: \"fea47134-e547-45db-8461-8b4154f25225\") " pod="openshift-nmstate/nmstate-handler-cd8pn" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.205445 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf5665f-7be9-4d4e-a750-611d44124963-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-v5nnm\" (UID: \"3bf5665f-7be9-4d4e-a750-611d44124963\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.205468 4822 
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.206053 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/fea47134-e547-45db-8461-8b4154f25225-nmstate-lock\") pod \"nmstate-handler-cd8pn\" (UID: \"fea47134-e547-45db-8461-8b4154f25225\") " pod="openshift-nmstate/nmstate-handler-cd8pn"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.206095 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/fea47134-e547-45db-8461-8b4154f25225-ovs-socket\") pod \"nmstate-handler-cd8pn\" (UID: \"fea47134-e547-45db-8461-8b4154f25225\") " pod="openshift-nmstate/nmstate-handler-cd8pn"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.206200 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/fea47134-e547-45db-8461-8b4154f25225-dbus-socket\") pod \"nmstate-handler-cd8pn\" (UID: \"fea47134-e547-45db-8461-8b4154f25225\") " pod="openshift-nmstate/nmstate-handler-cd8pn"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.206776 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/3bf5665f-7be9-4d4e-a750-611d44124963-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-v5nnm\" (UID: \"3bf5665f-7be9-4d4e-a750-611d44124963\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.209374 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf5665f-7be9-4d4e-a750-611d44124963-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-v5nnm\" (UID: \"3bf5665f-7be9-4d4e-a750-611d44124963\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.215475 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kkt2f"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.221494 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bhpr\" (UniqueName: \"kubernetes.io/projected/3bf5665f-7be9-4d4e-a750-611d44124963-kube-api-access-5bhpr\") pod \"nmstate-console-plugin-7fbb5f6569-v5nnm\" (UID: \"3bf5665f-7be9-4d4e-a750-611d44124963\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.234506 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn2t2\" (UniqueName: \"kubernetes.io/projected/fea47134-e547-45db-8461-8b4154f25225-kube-api-access-nn2t2\") pod \"nmstate-handler-cd8pn\" (UID: \"fea47134-e547-45db-8461-8b4154f25225\") " pod="openshift-nmstate/nmstate-handler-cd8pn"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.246252 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5dddd7d494-tmgx7"]
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.247118 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5dddd7d494-tmgx7"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.255094 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5dddd7d494-tmgx7"]
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.301176 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-cd8pn"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.306149 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-trusted-ca-bundle\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.306178 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-console-config\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.306228 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-console-oauth-config\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.306252 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-console-serving-cert\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7"
Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.306271 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftkm2\" (UniqueName: \"kubernetes.io/projected/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-kube-api-access-ftkm2\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7"
\"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.306289 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-oauth-serving-cert\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.306313 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-service-ca\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.363375 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.380705 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-cd8pn" event={"ID":"fea47134-e547-45db-8461-8b4154f25225","Type":"ContainerStarted","Data":"7aa6ce0a1846c55c2a759eba1b61a4e334b37d953c026591a0f690b0c56782ca"} Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.408021 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-trusted-ca-bundle\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.408078 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-console-config\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.408140 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-console-oauth-config\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.408172 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-console-serving-cert\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.408200 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftkm2\" (UniqueName: \"kubernetes.io/projected/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-kube-api-access-ftkm2\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.408226 4822 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-oauth-serving-cert\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.408262 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-service-ca\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.409333 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-service-ca\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.409654 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-trusted-ca-bundle\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.411025 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-console-config\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.411948 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-oauth-serving-cert\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.413500 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-console-serving-cert\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.414197 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-console-oauth-config\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.427462 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftkm2\" (UniqueName: \"kubernetes.io/projected/6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b-kube-api-access-ftkm2\") pod \"console-5dddd7d494-tmgx7\" (UID: \"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b\") " pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.429026 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-kkt2f"] Dec 01 07:03:59 crc kubenswrapper[4822]: W1201 07:03:59.438395 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod005acf82_ce0c_4762_b26d_b16df2767b45.slice/crio-405ca4a50492b78c9537ffdb79dc13a68e41b6cb822fb4eeb8ecf712f6a3eb63 WatchSource:0}: Error finding container 405ca4a50492b78c9537ffdb79dc13a68e41b6cb822fb4eeb8ecf712f6a3eb63: Status 404 returned error can't find the container with id 405ca4a50492b78c9537ffdb79dc13a68e41b6cb822fb4eeb8ecf712f6a3eb63 Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.560256 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm"] Dec 01 07:03:59 crc kubenswrapper[4822]: W1201 07:03:59.571376 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3bf5665f_7be9_4d4e_a750_611d44124963.slice/crio-f9ba51f6d435bbcf980f551ab7af9a84ad36d64181806f5479c1653c0b9acdbb WatchSource:0}: Error finding container f9ba51f6d435bbcf980f551ab7af9a84ad36d64181806f5479c1653c0b9acdbb: Status 404 returned error can't find the container with id f9ba51f6d435bbcf980f551ab7af9a84ad36d64181806f5479c1653c0b9acdbb Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.589782 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.611278 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/6cf74d74-242e-44f7-a881-01ae90b1c7be-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-nwxmp\" (UID: \"6cf74d74-242e-44f7-a881-01ae90b1c7be\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.615906 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/6cf74d74-242e-44f7-a881-01ae90b1c7be-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-nwxmp\" (UID: \"6cf74d74-242e-44f7-a881-01ae90b1c7be\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.803892 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5dddd7d494-tmgx7"] Dec 01 07:03:59 crc kubenswrapper[4822]: W1201 07:03:59.811851 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e16dd75_9e1c_4ae5_ad41_7e3fa4620d8b.slice/crio-1654897430deedf1049e6958c65b8d3301426cfd5dc04aa876f259649eaed736 WatchSource:0}: Error finding container 1654897430deedf1049e6958c65b8d3301426cfd5dc04aa876f259649eaed736: Status 404 returned error can't find the container with id 1654897430deedf1049e6958c65b8d3301426cfd5dc04aa876f259649eaed736 Dec 01 07:03:59 crc kubenswrapper[4822]: I1201 07:03:59.853297 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" Dec 01 07:04:00 crc kubenswrapper[4822]: I1201 07:04:00.061993 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp"] Dec 01 07:04:00 crc kubenswrapper[4822]: W1201 07:04:00.070359 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6cf74d74_242e_44f7_a881_01ae90b1c7be.slice/crio-6eac6d9036c1ef4651438e0247dfaed25fa908ec730343353533ff1475a260b2 WatchSource:0}: Error finding container 6eac6d9036c1ef4651438e0247dfaed25fa908ec730343353533ff1475a260b2: Status 404 returned error can't find the container with id 6eac6d9036c1ef4651438e0247dfaed25fa908ec730343353533ff1475a260b2 Dec 01 07:04:00 crc kubenswrapper[4822]: I1201 07:04:00.387113 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kkt2f" event={"ID":"005acf82-ce0c-4762-b26d-b16df2767b45","Type":"ContainerStarted","Data":"405ca4a50492b78c9537ffdb79dc13a68e41b6cb822fb4eeb8ecf712f6a3eb63"} Dec 01 07:04:00 crc kubenswrapper[4822]: I1201 07:04:00.387946 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" event={"ID":"6cf74d74-242e-44f7-a881-01ae90b1c7be","Type":"ContainerStarted","Data":"6eac6d9036c1ef4651438e0247dfaed25fa908ec730343353533ff1475a260b2"} Dec 01 07:04:00 crc kubenswrapper[4822]: I1201 07:04:00.389542 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5dddd7d494-tmgx7" event={"ID":"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b","Type":"ContainerStarted","Data":"7b6add15332febebcfa2bb2e2dfae398b1ed29a787e9485f35da9d853e9c240b"} Dec 01 07:04:00 crc kubenswrapper[4822]: I1201 07:04:00.389582 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5dddd7d494-tmgx7" event={"ID":"6e16dd75-9e1c-4ae5-ad41-7e3fa4620d8b","Type":"ContainerStarted","Data":"1654897430deedf1049e6958c65b8d3301426cfd5dc04aa876f259649eaed736"} Dec 01 07:04:00 crc kubenswrapper[4822]: I1201 07:04:00.391751 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm" event={"ID":"3bf5665f-7be9-4d4e-a750-611d44124963","Type":"ContainerStarted","Data":"f9ba51f6d435bbcf980f551ab7af9a84ad36d64181806f5479c1653c0b9acdbb"} Dec 01 07:04:00 crc kubenswrapper[4822]: I1201 07:04:00.412058 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5dddd7d494-tmgx7" podStartSLOduration=1.412036294 podStartE2EDuration="1.412036294s" podCreationTimestamp="2025-12-01 07:03:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:04:00.406235802 +0000 UTC m=+795.727043498" watchObservedRunningTime="2025-12-01 07:04:00.412036294 +0000 UTC m=+795.732843980" Dec 01 07:04:00 crc kubenswrapper[4822]: I1201 07:04:00.508242 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:04:00 crc kubenswrapper[4822]: I1201 07:04:00.557270 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:04:00 crc kubenswrapper[4822]: I1201 07:04:00.741623 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rwvr6"] Dec 01 
Dec 01 07:04:02 crc kubenswrapper[4822]: I1201 07:04:02.407226 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" event={"ID":"6cf74d74-242e-44f7-a881-01ae90b1c7be","Type":"ContainerStarted","Data":"43bfc81732c03a7a8b7dce43ea5494eb2d5bb44e1d3d43f9870482fabe0cfac7"}
Dec 01 07:04:02 crc kubenswrapper[4822]: I1201 07:04:02.407858 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp"
Dec 01 07:04:02 crc kubenswrapper[4822]: I1201 07:04:02.410141 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm" event={"ID":"3bf5665f-7be9-4d4e-a750-611d44124963","Type":"ContainerStarted","Data":"b02e8542c49fdc365cfc631266c71dfa96fe767e1f137c0ee87b562deef5364d"}
Dec 01 07:04:02 crc kubenswrapper[4822]: I1201 07:04:02.412311 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rwvr6" podUID="0acc0cd9-c969-4825-9346-6d6455d582f6" containerName="registry-server" containerID="cri-o://70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1" gracePeriod=2
Dec 01 07:04:02 crc kubenswrapper[4822]: I1201 07:04:02.412589 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kkt2f" event={"ID":"005acf82-ce0c-4762-b26d-b16df2767b45","Type":"ContainerStarted","Data":"c91238b70a52ef23c8ae3b4bb1868c74f49093e78a86285515717cd2c34831d3"}
Dec 01 07:04:02 crc kubenswrapper[4822]: I1201 07:04:02.438531 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" podStartSLOduration=2.376803528 podStartE2EDuration="4.438501933s" podCreationTimestamp="2025-12-01 07:03:58 +0000 UTC" firstStartedPulling="2025-12-01 07:04:00.072619215 +0000 UTC m=+795.393426901" lastFinishedPulling="2025-12-01 07:04:02.13431761 +0000 UTC m=+797.455125306" observedRunningTime="2025-12-01 07:04:02.435721865 +0000 UTC m=+797.756529561" watchObservedRunningTime="2025-12-01 07:04:02.438501933 +0000 UTC m=+797.759309619"
Dec 01 07:04:02 crc kubenswrapper[4822]: I1201 07:04:02.471249 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-v5nnm" podStartSLOduration=0.911942724 podStartE2EDuration="3.471222188s" podCreationTimestamp="2025-12-01 07:03:59 +0000 UTC" firstStartedPulling="2025-12-01 07:03:59.573501523 +0000 UTC m=+794.894309209" lastFinishedPulling="2025-12-01 07:04:02.132780987 +0000 UTC m=+797.453588673" observedRunningTime="2025-12-01 07:04:02.469142449 +0000 UTC m=+797.789950135" watchObservedRunningTime="2025-12-01 07:04:02.471222188 +0000 UTC m=+797.792029874"
Dec 01 07:04:02 crc kubenswrapper[4822]: I1201 07:04:02.811636 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rwvr6"
Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.002593 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0acc0cd9-c969-4825-9346-6d6455d582f6-utilities\") pod \"0acc0cd9-c969-4825-9346-6d6455d582f6\" (UID: \"0acc0cd9-c969-4825-9346-6d6455d582f6\") "
Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.002656 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0acc0cd9-c969-4825-9346-6d6455d582f6-catalog-content\") pod \"0acc0cd9-c969-4825-9346-6d6455d582f6\" (UID: \"0acc0cd9-c969-4825-9346-6d6455d582f6\") "
Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.002811 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rbff\" (UniqueName: \"kubernetes.io/projected/0acc0cd9-c969-4825-9346-6d6455d582f6-kube-api-access-7rbff\") pod \"0acc0cd9-c969-4825-9346-6d6455d582f6\" (UID: \"0acc0cd9-c969-4825-9346-6d6455d582f6\") "
Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.003810 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0acc0cd9-c969-4825-9346-6d6455d582f6-utilities" (OuterVolumeSpecName: "utilities") pod "0acc0cd9-c969-4825-9346-6d6455d582f6" (UID: "0acc0cd9-c969-4825-9346-6d6455d582f6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.010420 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0acc0cd9-c969-4825-9346-6d6455d582f6-kube-api-access-7rbff" (OuterVolumeSpecName: "kube-api-access-7rbff") pod "0acc0cd9-c969-4825-9346-6d6455d582f6" (UID: "0acc0cd9-c969-4825-9346-6d6455d582f6"). InnerVolumeSpecName "kube-api-access-7rbff". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.103887 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0acc0cd9-c969-4825-9346-6d6455d582f6-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.103914 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rbff\" (UniqueName: \"kubernetes.io/projected/0acc0cd9-c969-4825-9346-6d6455d582f6-kube-api-access-7rbff\") on node \"crc\" DevicePath \"\""
Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.115759 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0acc0cd9-c969-4825-9346-6d6455d582f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0acc0cd9-c969-4825-9346-6d6455d582f6" (UID: "0acc0cd9-c969-4825-9346-6d6455d582f6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.206156 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0acc0cd9-c969-4825-9346-6d6455d582f6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.426654 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-cd8pn" event={"ID":"fea47134-e547-45db-8461-8b4154f25225","Type":"ContainerStarted","Data":"6580508cd66029b9c79ddc408066ddeb5c557b6a90133faab1200d17875c3f42"} Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.428624 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-cd8pn" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.434126 4822 generic.go:334] "Generic (PLEG): container finished" podID="0acc0cd9-c969-4825-9346-6d6455d582f6" containerID="70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1" exitCode=0 Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.434212 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rwvr6" event={"ID":"0acc0cd9-c969-4825-9346-6d6455d582f6","Type":"ContainerDied","Data":"70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1"} Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.434295 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rwvr6" event={"ID":"0acc0cd9-c969-4825-9346-6d6455d582f6","Type":"ContainerDied","Data":"4201af4778c634ac16792e469ec3761ba2f400e1719569aaa5b3afd702c51888"} Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.434329 4822 scope.go:117] "RemoveContainer" containerID="70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.434243 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rwvr6" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.463350 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-cd8pn" podStartSLOduration=2.6551693 podStartE2EDuration="5.463309912s" podCreationTimestamp="2025-12-01 07:03:58 +0000 UTC" firstStartedPulling="2025-12-01 07:03:59.324649066 +0000 UTC m=+794.645456752" lastFinishedPulling="2025-12-01 07:04:02.132789678 +0000 UTC m=+797.453597364" observedRunningTime="2025-12-01 07:04:03.459271449 +0000 UTC m=+798.780079175" watchObservedRunningTime="2025-12-01 07:04:03.463309912 +0000 UTC m=+798.784117628" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.489059 4822 scope.go:117] "RemoveContainer" containerID="f88966dbc9bb93e08e6aec38f12210e2adb15008ed784eb8600f9a222578348a" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.511041 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rwvr6"] Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.529822 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rwvr6"] Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.532962 4822 scope.go:117] "RemoveContainer" containerID="a8a3b555c56b915d342cf8cbefb4c20428bd50b885f169271da2590f87d40f96" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.554071 4822 scope.go:117] "RemoveContainer" containerID="70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1" Dec 01 07:04:03 crc kubenswrapper[4822]: E1201 07:04:03.554648 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1\": container with ID starting with 70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1 not found: ID does not exist" containerID="70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.554688 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1"} err="failed to get container status \"70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1\": rpc error: code = NotFound desc = could not find container \"70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1\": container with ID starting with 70180e3b88195ec6920810b4353350f38ee3a550a7ea2cbdf4426f7d73fbdba1 not found: ID does not exist" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.554717 4822 scope.go:117] "RemoveContainer" containerID="f88966dbc9bb93e08e6aec38f12210e2adb15008ed784eb8600f9a222578348a" Dec 01 07:04:03 crc kubenswrapper[4822]: E1201 07:04:03.555227 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f88966dbc9bb93e08e6aec38f12210e2adb15008ed784eb8600f9a222578348a\": container with ID starting with f88966dbc9bb93e08e6aec38f12210e2adb15008ed784eb8600f9a222578348a not found: ID does not exist" containerID="f88966dbc9bb93e08e6aec38f12210e2adb15008ed784eb8600f9a222578348a" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.555274 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f88966dbc9bb93e08e6aec38f12210e2adb15008ed784eb8600f9a222578348a"} err="failed to get container status 
\"f88966dbc9bb93e08e6aec38f12210e2adb15008ed784eb8600f9a222578348a\": rpc error: code = NotFound desc = could not find container \"f88966dbc9bb93e08e6aec38f12210e2adb15008ed784eb8600f9a222578348a\": container with ID starting with f88966dbc9bb93e08e6aec38f12210e2adb15008ed784eb8600f9a222578348a not found: ID does not exist" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.555311 4822 scope.go:117] "RemoveContainer" containerID="a8a3b555c56b915d342cf8cbefb4c20428bd50b885f169271da2590f87d40f96" Dec 01 07:04:03 crc kubenswrapper[4822]: E1201 07:04:03.555598 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8a3b555c56b915d342cf8cbefb4c20428bd50b885f169271da2590f87d40f96\": container with ID starting with a8a3b555c56b915d342cf8cbefb4c20428bd50b885f169271da2590f87d40f96 not found: ID does not exist" containerID="a8a3b555c56b915d342cf8cbefb4c20428bd50b885f169271da2590f87d40f96" Dec 01 07:04:03 crc kubenswrapper[4822]: I1201 07:04:03.555621 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8a3b555c56b915d342cf8cbefb4c20428bd50b885f169271da2590f87d40f96"} err="failed to get container status \"a8a3b555c56b915d342cf8cbefb4c20428bd50b885f169271da2590f87d40f96\": rpc error: code = NotFound desc = could not find container \"a8a3b555c56b915d342cf8cbefb4c20428bd50b885f169271da2590f87d40f96\": container with ID starting with a8a3b555c56b915d342cf8cbefb4c20428bd50b885f169271da2590f87d40f96 not found: ID does not exist" Dec 01 07:04:04 crc kubenswrapper[4822]: I1201 07:04:04.961771 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0acc0cd9-c969-4825-9346-6d6455d582f6" path="/var/lib/kubelet/pods/0acc0cd9-c969-4825-9346-6d6455d582f6/volumes" Dec 01 07:04:05 crc kubenswrapper[4822]: I1201 07:04:05.453200 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kkt2f" event={"ID":"005acf82-ce0c-4762-b26d-b16df2767b45","Type":"ContainerStarted","Data":"baa810483b79e3b64864ddf0d5c2409dcb76fada8d09d452ad6db8acd6e2906f"} Dec 01 07:04:05 crc kubenswrapper[4822]: I1201 07:04:05.482433 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kkt2f" podStartSLOduration=2.049609912 podStartE2EDuration="7.482404065s" podCreationTimestamp="2025-12-01 07:03:58 +0000 UTC" firstStartedPulling="2025-12-01 07:03:59.440098204 +0000 UTC m=+794.760905890" lastFinishedPulling="2025-12-01 07:04:04.872892347 +0000 UTC m=+800.193700043" observedRunningTime="2025-12-01 07:04:05.479009811 +0000 UTC m=+800.799817637" watchObservedRunningTime="2025-12-01 07:04:05.482404065 +0000 UTC m=+800.803211781" Dec 01 07:04:09 crc kubenswrapper[4822]: I1201 07:04:09.329972 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-cd8pn" Dec 01 07:04:09 crc kubenswrapper[4822]: I1201 07:04:09.590135 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:04:09 crc kubenswrapper[4822]: I1201 07:04:09.590297 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:04:09 crc kubenswrapper[4822]: I1201 07:04:09.598572 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:04:10 crc kubenswrapper[4822]: 
I1201 07:04:10.508335 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5dddd7d494-tmgx7" Dec 01 07:04:10 crc kubenswrapper[4822]: I1201 07:04:10.578966 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-ptzkb"] Dec 01 07:04:19 crc kubenswrapper[4822]: I1201 07:04:19.862419 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-nwxmp" Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.351418 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"] Dec 01 07:04:35 crc kubenswrapper[4822]: E1201 07:04:35.352795 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0acc0cd9-c969-4825-9346-6d6455d582f6" containerName="extract-utilities" Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.352819 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="0acc0cd9-c969-4825-9346-6d6455d582f6" containerName="extract-utilities" Dec 01 07:04:35 crc kubenswrapper[4822]: E1201 07:04:35.352859 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0acc0cd9-c969-4825-9346-6d6455d582f6" containerName="registry-server" Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.352868 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="0acc0cd9-c969-4825-9346-6d6455d582f6" containerName="registry-server" Dec 01 07:04:35 crc kubenswrapper[4822]: E1201 07:04:35.352881 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0acc0cd9-c969-4825-9346-6d6455d582f6" containerName="extract-content" Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.352894 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="0acc0cd9-c969-4825-9346-6d6455d582f6" containerName="extract-content" Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.353032 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="0acc0cd9-c969-4825-9346-6d6455d582f6" containerName="registry-server" Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.354321 4822 util.go:30] "No sandbox for pod can be found. 
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.356206 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.372769 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"]
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.430039 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/04f4b9dc-ac07-4730-9fed-3d02c5144397-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls\" (UID: \"04f4b9dc-ac07-4730-9fed-3d02c5144397\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.430101 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/04f4b9dc-ac07-4730-9fed-3d02c5144397-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls\" (UID: \"04f4b9dc-ac07-4730-9fed-3d02c5144397\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.430194 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fsrb\" (UniqueName: \"kubernetes.io/projected/04f4b9dc-ac07-4730-9fed-3d02c5144397-kube-api-access-7fsrb\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls\" (UID: \"04f4b9dc-ac07-4730-9fed-3d02c5144397\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.532007 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/04f4b9dc-ac07-4730-9fed-3d02c5144397-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls\" (UID: \"04f4b9dc-ac07-4730-9fed-3d02c5144397\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.532063 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/04f4b9dc-ac07-4730-9fed-3d02c5144397-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls\" (UID: \"04f4b9dc-ac07-4730-9fed-3d02c5144397\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.532154 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fsrb\" (UniqueName: \"kubernetes.io/projected/04f4b9dc-ac07-4730-9fed-3d02c5144397-kube-api-access-7fsrb\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls\" (UID: \"04f4b9dc-ac07-4730-9fed-3d02c5144397\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.533009 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/04f4b9dc-ac07-4730-9fed-3d02c5144397-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls\" (UID: \"04f4b9dc-ac07-4730-9fed-3d02c5144397\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.534068 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/04f4b9dc-ac07-4730-9fed-3d02c5144397-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls\" (UID: \"04f4b9dc-ac07-4730-9fed-3d02c5144397\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.565699 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fsrb\" (UniqueName: \"kubernetes.io/projected/04f4b9dc-ac07-4730-9fed-3d02c5144397-kube-api-access-7fsrb\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls\" (UID: \"04f4b9dc-ac07-4730-9fed-3d02c5144397\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.654695 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-ptzkb" podUID="839d69e8-399b-4c30-b64f-893327a389e7" containerName="console" containerID="cri-o://50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247" gracePeriod=15
Dec 01 07:04:35 crc kubenswrapper[4822]: I1201 07:04:35.682079 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.094239 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-ptzkb_839d69e8-399b-4c30-b64f-893327a389e7/console/0.log"
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.094849 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-ptzkb"
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.143600 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtlj5\" (UniqueName: \"kubernetes.io/projected/839d69e8-399b-4c30-b64f-893327a389e7-kube-api-access-xtlj5\") pod \"839d69e8-399b-4c30-b64f-893327a389e7\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") "
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.143727 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-console-config\") pod \"839d69e8-399b-4c30-b64f-893327a389e7\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") "
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.143810 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-trusted-ca-bundle\") pod \"839d69e8-399b-4c30-b64f-893327a389e7\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") "
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.143858 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-oauth-serving-cert\") pod \"839d69e8-399b-4c30-b64f-893327a389e7\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") "
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.143895 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-service-ca\") pod \"839d69e8-399b-4c30-b64f-893327a389e7\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") "
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.143949 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/839d69e8-399b-4c30-b64f-893327a389e7-console-oauth-config\") pod \"839d69e8-399b-4c30-b64f-893327a389e7\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") "
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.143981 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/839d69e8-399b-4c30-b64f-893327a389e7-console-serving-cert\") pod \"839d69e8-399b-4c30-b64f-893327a389e7\" (UID: \"839d69e8-399b-4c30-b64f-893327a389e7\") "
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.145217 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "839d69e8-399b-4c30-b64f-893327a389e7" (UID: "839d69e8-399b-4c30-b64f-893327a389e7"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.145247 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-service-ca" (OuterVolumeSpecName: "service-ca") pod "839d69e8-399b-4c30-b64f-893327a389e7" (UID: "839d69e8-399b-4c30-b64f-893327a389e7"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.145445 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "839d69e8-399b-4c30-b64f-893327a389e7" (UID: "839d69e8-399b-4c30-b64f-893327a389e7"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.146083 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-console-config" (OuterVolumeSpecName: "console-config") pod "839d69e8-399b-4c30-b64f-893327a389e7" (UID: "839d69e8-399b-4c30-b64f-893327a389e7"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.150794 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/839d69e8-399b-4c30-b64f-893327a389e7-kube-api-access-xtlj5" (OuterVolumeSpecName: "kube-api-access-xtlj5") pod "839d69e8-399b-4c30-b64f-893327a389e7" (UID: "839d69e8-399b-4c30-b64f-893327a389e7"). InnerVolumeSpecName "kube-api-access-xtlj5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.151317 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/839d69e8-399b-4c30-b64f-893327a389e7-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "839d69e8-399b-4c30-b64f-893327a389e7" (UID: "839d69e8-399b-4c30-b64f-893327a389e7"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.152791 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/839d69e8-399b-4c30-b64f-893327a389e7-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "839d69e8-399b-4c30-b64f-893327a389e7" (UID: "839d69e8-399b-4c30-b64f-893327a389e7"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.185325 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls"] Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.245696 4822 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.245737 4822 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.245750 4822 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.245761 4822 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/839d69e8-399b-4c30-b64f-893327a389e7-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.245802 4822 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/839d69e8-399b-4c30-b64f-893327a389e7-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.245814 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtlj5\" (UniqueName: \"kubernetes.io/projected/839d69e8-399b-4c30-b64f-893327a389e7-kube-api-access-xtlj5\") on node \"crc\" DevicePath \"\"" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.245826 4822 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/839d69e8-399b-4c30-b64f-893327a389e7-console-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:04:36 crc kubenswrapper[4822]: E1201 07:04:36.494212 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04f4b9dc_ac07_4730_9fed_3d02c5144397.slice/crio-3dd0d5ecdc6f68568abdfe2a60d186d944778a0c508e50ae3771ae6f55a3b198.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04f4b9dc_ac07_4730_9fed_3d02c5144397.slice/crio-conmon-3dd0d5ecdc6f68568abdfe2a60d186d944778a0c508e50ae3771ae6f55a3b198.scope\": RecentStats: unable to find data in memory cache]" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.690909 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-ptzkb_839d69e8-399b-4c30-b64f-893327a389e7/console/0.log" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.691617 4822 generic.go:334] "Generic (PLEG): container finished" podID="839d69e8-399b-4c30-b64f-893327a389e7" containerID="50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247" exitCode=2 Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.691650 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-ptzkb" 
event={"ID":"839d69e8-399b-4c30-b64f-893327a389e7","Type":"ContainerDied","Data":"50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247"} Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.691764 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-ptzkb" event={"ID":"839d69e8-399b-4c30-b64f-893327a389e7","Type":"ContainerDied","Data":"9ec9e48c7b095e820c45dff56d8ca643b8fa7d596bf79ba6f35a98f0e04bfcf0"} Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.691680 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-ptzkb" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.691794 4822 scope.go:117] "RemoveContainer" containerID="50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.694937 4822 generic.go:334] "Generic (PLEG): container finished" podID="04f4b9dc-ac07-4730-9fed-3d02c5144397" containerID="3dd0d5ecdc6f68568abdfe2a60d186d944778a0c508e50ae3771ae6f55a3b198" exitCode=0 Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.695012 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls" event={"ID":"04f4b9dc-ac07-4730-9fed-3d02c5144397","Type":"ContainerDied","Data":"3dd0d5ecdc6f68568abdfe2a60d186d944778a0c508e50ae3771ae6f55a3b198"} Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.695055 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls" event={"ID":"04f4b9dc-ac07-4730-9fed-3d02c5144397","Type":"ContainerStarted","Data":"d8de014d95238bcc5be7d54ac4776bf59b9399f703457e50ad1bb084810eb9ad"} Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.731898 4822 scope.go:117] "RemoveContainer" containerID="50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247" Dec 01 07:04:36 crc kubenswrapper[4822]: E1201 07:04:36.733416 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247\": container with ID starting with 50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247 not found: ID does not exist" containerID="50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.733472 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247"} err="failed to get container status \"50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247\": rpc error: code = NotFound desc = could not find container \"50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247\": container with ID starting with 50b384e288558e8c24afac0ae519e8fdd41868b3b105d7b5c1fa0acda0ad9247 not found: ID does not exist" Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.755580 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-ptzkb"] Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.762804 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-ptzkb"] Dec 01 07:04:36 crc kubenswrapper[4822]: I1201 07:04:36.961149 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="839d69e8-399b-4c30-b64f-893327a389e7" path="/var/lib/kubelet/pods/839d69e8-399b-4c30-b64f-893327a389e7/volumes" Dec 01 07:04:38 crc kubenswrapper[4822]: I1201 07:04:38.716093 4822 generic.go:334] "Generic (PLEG): container finished" podID="04f4b9dc-ac07-4730-9fed-3d02c5144397" containerID="c133c583d63edbf6007f90172d8f1d737f8f49ffd8b9a102b0ce6681e4ff8cb5" exitCode=0 Dec 01 07:04:38 crc kubenswrapper[4822]: I1201 07:04:38.716269 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls" event={"ID":"04f4b9dc-ac07-4730-9fed-3d02c5144397","Type":"ContainerDied","Data":"c133c583d63edbf6007f90172d8f1d737f8f49ffd8b9a102b0ce6681e4ff8cb5"} Dec 01 07:04:39 crc kubenswrapper[4822]: I1201 07:04:39.727614 4822 generic.go:334] "Generic (PLEG): container finished" podID="04f4b9dc-ac07-4730-9fed-3d02c5144397" containerID="8cfbb1db1c0571e928f3e98772a0880fbfb59be6a82d07860ea7a4fbccd1f975" exitCode=0 Dec 01 07:04:39 crc kubenswrapper[4822]: I1201 07:04:39.727702 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls" event={"ID":"04f4b9dc-ac07-4730-9fed-3d02c5144397","Type":"ContainerDied","Data":"8cfbb1db1c0571e928f3e98772a0880fbfb59be6a82d07860ea7a4fbccd1f975"} Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.237203 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls" Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.360799 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/04f4b9dc-ac07-4730-9fed-3d02c5144397-util\") pod \"04f4b9dc-ac07-4730-9fed-3d02c5144397\" (UID: \"04f4b9dc-ac07-4730-9fed-3d02c5144397\") " Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.360854 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fsrb\" (UniqueName: \"kubernetes.io/projected/04f4b9dc-ac07-4730-9fed-3d02c5144397-kube-api-access-7fsrb\") pod \"04f4b9dc-ac07-4730-9fed-3d02c5144397\" (UID: \"04f4b9dc-ac07-4730-9fed-3d02c5144397\") " Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.360901 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/04f4b9dc-ac07-4730-9fed-3d02c5144397-bundle\") pod \"04f4b9dc-ac07-4730-9fed-3d02c5144397\" (UID: \"04f4b9dc-ac07-4730-9fed-3d02c5144397\") " Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.365610 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04f4b9dc-ac07-4730-9fed-3d02c5144397-bundle" (OuterVolumeSpecName: "bundle") pod "04f4b9dc-ac07-4730-9fed-3d02c5144397" (UID: "04f4b9dc-ac07-4730-9fed-3d02c5144397"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.372249 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04f4b9dc-ac07-4730-9fed-3d02c5144397-kube-api-access-7fsrb" (OuterVolumeSpecName: "kube-api-access-7fsrb") pod "04f4b9dc-ac07-4730-9fed-3d02c5144397" (UID: "04f4b9dc-ac07-4730-9fed-3d02c5144397"). InnerVolumeSpecName "kube-api-access-7fsrb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.383960 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04f4b9dc-ac07-4730-9fed-3d02c5144397-util" (OuterVolumeSpecName: "util") pod "04f4b9dc-ac07-4730-9fed-3d02c5144397" (UID: "04f4b9dc-ac07-4730-9fed-3d02c5144397"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.462525 4822 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/04f4b9dc-ac07-4730-9fed-3d02c5144397-util\") on node \"crc\" DevicePath \"\"" Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.462616 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fsrb\" (UniqueName: \"kubernetes.io/projected/04f4b9dc-ac07-4730-9fed-3d02c5144397-kube-api-access-7fsrb\") on node \"crc\" DevicePath \"\"" Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.462679 4822 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/04f4b9dc-ac07-4730-9fed-3d02c5144397-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.749407 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls" event={"ID":"04f4b9dc-ac07-4730-9fed-3d02c5144397","Type":"ContainerDied","Data":"d8de014d95238bcc5be7d54ac4776bf59b9399f703457e50ad1bb084810eb9ad"} Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.749998 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8de014d95238bcc5be7d54ac4776bf59b9399f703457e50ad1bb084810eb9ad" Dec 01 07:04:41 crc kubenswrapper[4822]: I1201 07:04:41.749516 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.512773 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284"] Dec 01 07:04:50 crc kubenswrapper[4822]: E1201 07:04:50.513929 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04f4b9dc-ac07-4730-9fed-3d02c5144397" containerName="extract" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.513944 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="04f4b9dc-ac07-4730-9fed-3d02c5144397" containerName="extract" Dec 01 07:04:50 crc kubenswrapper[4822]: E1201 07:04:50.513960 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="839d69e8-399b-4c30-b64f-893327a389e7" containerName="console" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.513965 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="839d69e8-399b-4c30-b64f-893327a389e7" containerName="console" Dec 01 07:04:50 crc kubenswrapper[4822]: E1201 07:04:50.513977 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04f4b9dc-ac07-4730-9fed-3d02c5144397" containerName="pull" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.513983 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="04f4b9dc-ac07-4730-9fed-3d02c5144397" containerName="pull" Dec 01 07:04:50 crc kubenswrapper[4822]: E1201 07:04:50.513991 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04f4b9dc-ac07-4730-9fed-3d02c5144397" containerName="util" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.513998 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="04f4b9dc-ac07-4730-9fed-3d02c5144397" containerName="util" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.514097 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="04f4b9dc-ac07-4730-9fed-3d02c5144397" containerName="extract" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.514112 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="839d69e8-399b-4c30-b64f-893327a389e7" containerName="console" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.514614 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.519752 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.519783 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.520018 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.520190 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-xd24t" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.520446 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.530511 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284"] Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.617156 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2v7jk\" (UniqueName: \"kubernetes.io/projected/c0272de7-59c2-4625-9cfb-6e6daaf65437-kube-api-access-2v7jk\") pod \"metallb-operator-controller-manager-f6cc7bfbc-4z284\" (UID: \"c0272de7-59c2-4625-9cfb-6e6daaf65437\") " pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.617226 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c0272de7-59c2-4625-9cfb-6e6daaf65437-webhook-cert\") pod \"metallb-operator-controller-manager-f6cc7bfbc-4z284\" (UID: \"c0272de7-59c2-4625-9cfb-6e6daaf65437\") " pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.617256 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c0272de7-59c2-4625-9cfb-6e6daaf65437-apiservice-cert\") pod \"metallb-operator-controller-manager-f6cc7bfbc-4z284\" (UID: \"c0272de7-59c2-4625-9cfb-6e6daaf65437\") " pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.718743 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c0272de7-59c2-4625-9cfb-6e6daaf65437-webhook-cert\") pod \"metallb-operator-controller-manager-f6cc7bfbc-4z284\" (UID: \"c0272de7-59c2-4625-9cfb-6e6daaf65437\") " pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.718813 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c0272de7-59c2-4625-9cfb-6e6daaf65437-apiservice-cert\") pod \"metallb-operator-controller-manager-f6cc7bfbc-4z284\" (UID: \"c0272de7-59c2-4625-9cfb-6e6daaf65437\") " pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.718938 4822 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-2v7jk\" (UniqueName: \"kubernetes.io/projected/c0272de7-59c2-4625-9cfb-6e6daaf65437-kube-api-access-2v7jk\") pod \"metallb-operator-controller-manager-f6cc7bfbc-4z284\" (UID: \"c0272de7-59c2-4625-9cfb-6e6daaf65437\") " pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.726058 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c0272de7-59c2-4625-9cfb-6e6daaf65437-apiservice-cert\") pod \"metallb-operator-controller-manager-f6cc7bfbc-4z284\" (UID: \"c0272de7-59c2-4625-9cfb-6e6daaf65437\") " pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.734311 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c0272de7-59c2-4625-9cfb-6e6daaf65437-webhook-cert\") pod \"metallb-operator-controller-manager-f6cc7bfbc-4z284\" (UID: \"c0272de7-59c2-4625-9cfb-6e6daaf65437\") " pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.741219 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2v7jk\" (UniqueName: \"kubernetes.io/projected/c0272de7-59c2-4625-9cfb-6e6daaf65437-kube-api-access-2v7jk\") pod \"metallb-operator-controller-manager-f6cc7bfbc-4z284\" (UID: \"c0272de7-59c2-4625-9cfb-6e6daaf65437\") " pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.835742 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.876691 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5"] Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.877314 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.881420 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.881585 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-xqfhc" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.884006 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 01 07:04:50 crc kubenswrapper[4822]: I1201 07:04:50.893079 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5"] Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.022637 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/50359fe9-284a-489c-85ba-619ce67e1eb6-apiservice-cert\") pod \"metallb-operator-webhook-server-f677cc54b-xd4c5\" (UID: \"50359fe9-284a-489c-85ba-619ce67e1eb6\") " pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.023001 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/50359fe9-284a-489c-85ba-619ce67e1eb6-webhook-cert\") pod \"metallb-operator-webhook-server-f677cc54b-xd4c5\" (UID: \"50359fe9-284a-489c-85ba-619ce67e1eb6\") " pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.023029 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxmsv\" (UniqueName: \"kubernetes.io/projected/50359fe9-284a-489c-85ba-619ce67e1eb6-kube-api-access-kxmsv\") pod \"metallb-operator-webhook-server-f677cc54b-xd4c5\" (UID: \"50359fe9-284a-489c-85ba-619ce67e1eb6\") " pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.119850 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284"] Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.124348 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/50359fe9-284a-489c-85ba-619ce67e1eb6-webhook-cert\") pod \"metallb-operator-webhook-server-f677cc54b-xd4c5\" (UID: \"50359fe9-284a-489c-85ba-619ce67e1eb6\") " pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.124388 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/50359fe9-284a-489c-85ba-619ce67e1eb6-apiservice-cert\") pod \"metallb-operator-webhook-server-f677cc54b-xd4c5\" (UID: \"50359fe9-284a-489c-85ba-619ce67e1eb6\") " pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.124419 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxmsv\" (UniqueName: \"kubernetes.io/projected/50359fe9-284a-489c-85ba-619ce67e1eb6-kube-api-access-kxmsv\") pod 
\"metallb-operator-webhook-server-f677cc54b-xd4c5\" (UID: \"50359fe9-284a-489c-85ba-619ce67e1eb6\") " pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.130616 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/50359fe9-284a-489c-85ba-619ce67e1eb6-apiservice-cert\") pod \"metallb-operator-webhook-server-f677cc54b-xd4c5\" (UID: \"50359fe9-284a-489c-85ba-619ce67e1eb6\") " pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.132194 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/50359fe9-284a-489c-85ba-619ce67e1eb6-webhook-cert\") pod \"metallb-operator-webhook-server-f677cc54b-xd4c5\" (UID: \"50359fe9-284a-489c-85ba-619ce67e1eb6\") " pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.156428 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxmsv\" (UniqueName: \"kubernetes.io/projected/50359fe9-284a-489c-85ba-619ce67e1eb6-kube-api-access-kxmsv\") pod \"metallb-operator-webhook-server-f677cc54b-xd4c5\" (UID: \"50359fe9-284a-489c-85ba-619ce67e1eb6\") " pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.214671 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.477805 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5"] Dec 01 07:04:51 crc kubenswrapper[4822]: W1201 07:04:51.486010 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50359fe9_284a_489c_85ba_619ce67e1eb6.slice/crio-5c7a9f9913f8390edcc40c83ce728f534e4192f96a290742f7d71291a3c5afb2 WatchSource:0}: Error finding container 5c7a9f9913f8390edcc40c83ce728f534e4192f96a290742f7d71291a3c5afb2: Status 404 returned error can't find the container with id 5c7a9f9913f8390edcc40c83ce728f534e4192f96a290742f7d71291a3c5afb2 Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.823889 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" event={"ID":"c0272de7-59c2-4625-9cfb-6e6daaf65437","Type":"ContainerStarted","Data":"1d39bc58fdc121567bcd6908b0e580cc9223dc0bd8baad10b0da3fb431f87762"} Dec 01 07:04:51 crc kubenswrapper[4822]: I1201 07:04:51.825454 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" event={"ID":"50359fe9-284a-489c-85ba-619ce67e1eb6","Type":"ContainerStarted","Data":"5c7a9f9913f8390edcc40c83ce728f534e4192f96a290742f7d71291a3c5afb2"} Dec 01 07:04:57 crc kubenswrapper[4822]: I1201 07:04:57.867135 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" event={"ID":"c0272de7-59c2-4625-9cfb-6e6daaf65437","Type":"ContainerStarted","Data":"d616e6e9ba1ee6a06123f9a6400743406840142c821d7f106d2c9d5cb59a3a20"} Dec 01 07:04:57 crc kubenswrapper[4822]: I1201 07:04:57.868531 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:04:57 crc kubenswrapper[4822]: I1201 07:04:57.870495 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" event={"ID":"50359fe9-284a-489c-85ba-619ce67e1eb6","Type":"ContainerStarted","Data":"4dc1f7c08c7827cdce8c2f1e01cf84fa8d20677b82a692aff370e79750c9b8aa"} Dec 01 07:04:57 crc kubenswrapper[4822]: I1201 07:04:57.871309 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:04:57 crc kubenswrapper[4822]: I1201 07:04:57.896668 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" podStartSLOduration=1.726301725 podStartE2EDuration="7.896646658s" podCreationTimestamp="2025-12-01 07:04:50 +0000 UTC" firstStartedPulling="2025-12-01 07:04:51.137729175 +0000 UTC m=+846.458536861" lastFinishedPulling="2025-12-01 07:04:57.308074108 +0000 UTC m=+852.628881794" observedRunningTime="2025-12-01 07:04:57.892610305 +0000 UTC m=+853.213418021" watchObservedRunningTime="2025-12-01 07:04:57.896646658 +0000 UTC m=+853.217454354" Dec 01 07:04:57 crc kubenswrapper[4822]: I1201 07:04:57.922122 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" podStartSLOduration=2.078882143 podStartE2EDuration="7.922094291s" podCreationTimestamp="2025-12-01 07:04:50 +0000 UTC" firstStartedPulling="2025-12-01 07:04:51.489299855 +0000 UTC m=+846.810107531" lastFinishedPulling="2025-12-01 07:04:57.332511993 +0000 UTC m=+852.653319679" observedRunningTime="2025-12-01 07:04:57.917260996 +0000 UTC m=+853.238068712" watchObservedRunningTime="2025-12-01 07:04:57.922094291 +0000 UTC m=+853.242901977" Dec 01 07:05:11 crc kubenswrapper[4822]: I1201 07:05:11.220677 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-f677cc54b-xd4c5" Dec 01 07:05:30 crc kubenswrapper[4822]: I1201 07:05:30.839975 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-f6cc7bfbc-4z284" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.587475 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-6lntj"] Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.591320 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.591584 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t"] Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.592620 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.595237 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.595454 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-mfzb9" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.595639 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.595793 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.599995 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t"] Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.709200 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-glkvk"] Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.710369 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-glkvk" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.714069 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.714637 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.714783 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.714912 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-mv5vm" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.721201 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-b25m8"] Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.722483 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.724499 4822 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.733083 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-b25m8"] Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.757644 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qdjq\" (UniqueName: \"kubernetes.io/projected/59f52292-bc58-4afb-b290-6b1ab09a2187-kube-api-access-6qdjq\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.758030 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/59f52292-bc58-4afb-b290-6b1ab09a2187-metrics\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.758127 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/59f52292-bc58-4afb-b290-6b1ab09a2187-frr-conf\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.758232 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/59f52292-bc58-4afb-b290-6b1ab09a2187-metrics-certs\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.758344 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5824cc48-45a9-4c0e-80f1-e8305911cccc-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-k644t\" (UID: \"5824cc48-45a9-4c0e-80f1-e8305911cccc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.758380 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/59f52292-bc58-4afb-b290-6b1ab09a2187-reloader\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.758417 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkfxg\" (UniqueName: \"kubernetes.io/projected/5824cc48-45a9-4c0e-80f1-e8305911cccc-kube-api-access-bkfxg\") pod \"frr-k8s-webhook-server-7fcb986d4-k644t\" (UID: \"5824cc48-45a9-4c0e-80f1-e8305911cccc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.758450 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/59f52292-bc58-4afb-b290-6b1ab09a2187-frr-startup\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc 
kubenswrapper[4822]: I1201 07:05:31.758468 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/59f52292-bc58-4afb-b290-6b1ab09a2187-frr-sockets\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860032 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/59f52292-bc58-4afb-b290-6b1ab09a2187-frr-sockets\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860104 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgwtf\" (UniqueName: \"kubernetes.io/projected/0bfbda8a-cb53-46e0-8deb-180e26af5e36-kube-api-access-sgwtf\") pod \"controller-f8648f98b-b25m8\" (UID: \"0bfbda8a-cb53-46e0-8deb-180e26af5e36\") " pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860152 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0bfbda8a-cb53-46e0-8deb-180e26af5e36-metrics-certs\") pod \"controller-f8648f98b-b25m8\" (UID: \"0bfbda8a-cb53-46e0-8deb-180e26af5e36\") " pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860187 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qdjq\" (UniqueName: \"kubernetes.io/projected/59f52292-bc58-4afb-b290-6b1ab09a2187-kube-api-access-6qdjq\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860225 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqwgx\" (UniqueName: \"kubernetes.io/projected/4acda3d2-5fc5-4827-bdab-6369a308f6aa-kube-api-access-xqwgx\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860420 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/59f52292-bc58-4afb-b290-6b1ab09a2187-metrics\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860508 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4acda3d2-5fc5-4827-bdab-6369a308f6aa-memberlist\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860564 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/59f52292-bc58-4afb-b290-6b1ab09a2187-frr-conf\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860596 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" 
(UniqueName: \"kubernetes.io/secret/59f52292-bc58-4afb-b290-6b1ab09a2187-metrics-certs\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860663 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0bfbda8a-cb53-46e0-8deb-180e26af5e36-cert\") pod \"controller-f8648f98b-b25m8\" (UID: \"0bfbda8a-cb53-46e0-8deb-180e26af5e36\") " pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860688 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5824cc48-45a9-4c0e-80f1-e8305911cccc-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-k644t\" (UID: \"5824cc48-45a9-4c0e-80f1-e8305911cccc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860705 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4acda3d2-5fc5-4827-bdab-6369a308f6aa-metrics-certs\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860738 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/59f52292-bc58-4afb-b290-6b1ab09a2187-reloader\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860822 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/4acda3d2-5fc5-4827-bdab-6369a308f6aa-metallb-excludel2\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860856 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkfxg\" (UniqueName: \"kubernetes.io/projected/5824cc48-45a9-4c0e-80f1-e8305911cccc-kube-api-access-bkfxg\") pod \"frr-k8s-webhook-server-7fcb986d4-k644t\" (UID: \"5824cc48-45a9-4c0e-80f1-e8305911cccc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.860930 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/59f52292-bc58-4afb-b290-6b1ab09a2187-frr-startup\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.861139 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/59f52292-bc58-4afb-b290-6b1ab09a2187-frr-sockets\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.861188 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/59f52292-bc58-4afb-b290-6b1ab09a2187-metrics\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " 
pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: E1201 07:05:31.861300 4822 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 01 07:05:31 crc kubenswrapper[4822]: E1201 07:05:31.861367 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5824cc48-45a9-4c0e-80f1-e8305911cccc-cert podName:5824cc48-45a9-4c0e-80f1-e8305911cccc nodeName:}" failed. No retries permitted until 2025-12-01 07:05:32.361342443 +0000 UTC m=+887.682150129 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/5824cc48-45a9-4c0e-80f1-e8305911cccc-cert") pod "frr-k8s-webhook-server-7fcb986d4-k644t" (UID: "5824cc48-45a9-4c0e-80f1-e8305911cccc") : secret "frr-k8s-webhook-server-cert" not found Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.861741 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/59f52292-bc58-4afb-b290-6b1ab09a2187-reloader\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.861987 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/59f52292-bc58-4afb-b290-6b1ab09a2187-frr-conf\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.862459 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/59f52292-bc58-4afb-b290-6b1ab09a2187-frr-startup\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.873664 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/59f52292-bc58-4afb-b290-6b1ab09a2187-metrics-certs\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.882119 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qdjq\" (UniqueName: \"kubernetes.io/projected/59f52292-bc58-4afb-b290-6b1ab09a2187-kube-api-access-6qdjq\") pod \"frr-k8s-6lntj\" (UID: \"59f52292-bc58-4afb-b290-6b1ab09a2187\") " pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.885524 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkfxg\" (UniqueName: \"kubernetes.io/projected/5824cc48-45a9-4c0e-80f1-e8305911cccc-kube-api-access-bkfxg\") pod \"frr-k8s-webhook-server-7fcb986d4-k644t\" (UID: \"5824cc48-45a9-4c0e-80f1-e8305911cccc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.922421 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.962016 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqwgx\" (UniqueName: \"kubernetes.io/projected/4acda3d2-5fc5-4827-bdab-6369a308f6aa-kube-api-access-xqwgx\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.962168 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4acda3d2-5fc5-4827-bdab-6369a308f6aa-memberlist\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.962263 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0bfbda8a-cb53-46e0-8deb-180e26af5e36-cert\") pod \"controller-f8648f98b-b25m8\" (UID: \"0bfbda8a-cb53-46e0-8deb-180e26af5e36\") " pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.962338 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4acda3d2-5fc5-4827-bdab-6369a308f6aa-metrics-certs\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.962417 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/4acda3d2-5fc5-4827-bdab-6369a308f6aa-metallb-excludel2\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.962518 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgwtf\" (UniqueName: \"kubernetes.io/projected/0bfbda8a-cb53-46e0-8deb-180e26af5e36-kube-api-access-sgwtf\") pod \"controller-f8648f98b-b25m8\" (UID: \"0bfbda8a-cb53-46e0-8deb-180e26af5e36\") " pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.962624 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0bfbda8a-cb53-46e0-8deb-180e26af5e36-metrics-certs\") pod \"controller-f8648f98b-b25m8\" (UID: \"0bfbda8a-cb53-46e0-8deb-180e26af5e36\") " pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:31 crc kubenswrapper[4822]: E1201 07:05:31.962814 4822 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Dec 01 07:05:31 crc kubenswrapper[4822]: E1201 07:05:31.962920 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0bfbda8a-cb53-46e0-8deb-180e26af5e36-metrics-certs podName:0bfbda8a-cb53-46e0-8deb-180e26af5e36 nodeName:}" failed. No retries permitted until 2025-12-01 07:05:32.462905039 +0000 UTC m=+887.783712725 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0bfbda8a-cb53-46e0-8deb-180e26af5e36-metrics-certs") pod "controller-f8648f98b-b25m8" (UID: "0bfbda8a-cb53-46e0-8deb-180e26af5e36") : secret "controller-certs-secret" not found Dec 01 07:05:31 crc kubenswrapper[4822]: E1201 07:05:31.963214 4822 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 01 07:05:31 crc kubenswrapper[4822]: E1201 07:05:31.963299 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4acda3d2-5fc5-4827-bdab-6369a308f6aa-memberlist podName:4acda3d2-5fc5-4827-bdab-6369a308f6aa nodeName:}" failed. No retries permitted until 2025-12-01 07:05:32.46329071 +0000 UTC m=+887.784098396 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/4acda3d2-5fc5-4827-bdab-6369a308f6aa-memberlist") pod "speaker-glkvk" (UID: "4acda3d2-5fc5-4827-bdab-6369a308f6aa") : secret "metallb-memberlist" not found Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.964959 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/4acda3d2-5fc5-4827-bdab-6369a308f6aa-metallb-excludel2\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.968595 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0bfbda8a-cb53-46e0-8deb-180e26af5e36-cert\") pod \"controller-f8648f98b-b25m8\" (UID: \"0bfbda8a-cb53-46e0-8deb-180e26af5e36\") " pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:31 crc kubenswrapper[4822]: I1201 07:05:31.969147 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4acda3d2-5fc5-4827-bdab-6369a308f6aa-metrics-certs\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:32 crc kubenswrapper[4822]: I1201 07:05:32.008150 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqwgx\" (UniqueName: \"kubernetes.io/projected/4acda3d2-5fc5-4827-bdab-6369a308f6aa-kube-api-access-xqwgx\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:32 crc kubenswrapper[4822]: I1201 07:05:32.023320 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgwtf\" (UniqueName: \"kubernetes.io/projected/0bfbda8a-cb53-46e0-8deb-180e26af5e36-kube-api-access-sgwtf\") pod \"controller-f8648f98b-b25m8\" (UID: \"0bfbda8a-cb53-46e0-8deb-180e26af5e36\") " pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:32 crc kubenswrapper[4822]: I1201 07:05:32.369889 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5824cc48-45a9-4c0e-80f1-e8305911cccc-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-k644t\" (UID: \"5824cc48-45a9-4c0e-80f1-e8305911cccc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" Dec 01 07:05:32 crc kubenswrapper[4822]: I1201 07:05:32.374809 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5824cc48-45a9-4c0e-80f1-e8305911cccc-cert\") pod 
\"frr-k8s-webhook-server-7fcb986d4-k644t\" (UID: \"5824cc48-45a9-4c0e-80f1-e8305911cccc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" Dec 01 07:05:32 crc kubenswrapper[4822]: I1201 07:05:32.471243 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4acda3d2-5fc5-4827-bdab-6369a308f6aa-memberlist\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:32 crc kubenswrapper[4822]: E1201 07:05:32.471530 4822 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 01 07:05:32 crc kubenswrapper[4822]: I1201 07:05:32.471821 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0bfbda8a-cb53-46e0-8deb-180e26af5e36-metrics-certs\") pod \"controller-f8648f98b-b25m8\" (UID: \"0bfbda8a-cb53-46e0-8deb-180e26af5e36\") " pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:32 crc kubenswrapper[4822]: E1201 07:05:32.472203 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4acda3d2-5fc5-4827-bdab-6369a308f6aa-memberlist podName:4acda3d2-5fc5-4827-bdab-6369a308f6aa nodeName:}" failed. No retries permitted until 2025-12-01 07:05:33.472167997 +0000 UTC m=+888.792975693 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/4acda3d2-5fc5-4827-bdab-6369a308f6aa-memberlist") pod "speaker-glkvk" (UID: "4acda3d2-5fc5-4827-bdab-6369a308f6aa") : secret "metallb-memberlist" not found Dec 01 07:05:32 crc kubenswrapper[4822]: I1201 07:05:32.475835 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0bfbda8a-cb53-46e0-8deb-180e26af5e36-metrics-certs\") pod \"controller-f8648f98b-b25m8\" (UID: \"0bfbda8a-cb53-46e0-8deb-180e26af5e36\") " pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:32 crc kubenswrapper[4822]: I1201 07:05:32.533620 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" Dec 01 07:05:32 crc kubenswrapper[4822]: I1201 07:05:32.642883 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:32 crc kubenswrapper[4822]: I1201 07:05:32.800460 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t"] Dec 01 07:05:32 crc kubenswrapper[4822]: I1201 07:05:32.859422 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-b25m8"] Dec 01 07:05:32 crc kubenswrapper[4822]: W1201 07:05:32.861943 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0bfbda8a_cb53_46e0_8deb_180e26af5e36.slice/crio-3aeb4e360ed623390376329b278f3fa8434d304feb747917bcf5671a9481c118 WatchSource:0}: Error finding container 3aeb4e360ed623390376329b278f3fa8434d304feb747917bcf5671a9481c118: Status 404 returned error can't find the container with id 3aeb4e360ed623390376329b278f3fa8434d304feb747917bcf5671a9481c118 Dec 01 07:05:33 crc kubenswrapper[4822]: I1201 07:05:33.115832 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6lntj" event={"ID":"59f52292-bc58-4afb-b290-6b1ab09a2187","Type":"ContainerStarted","Data":"ede3276f7d421e61248a08930f75f9913353edc7fff0bfbadde086c853cb4edf"} Dec 01 07:05:33 crc kubenswrapper[4822]: I1201 07:05:33.117079 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" event={"ID":"5824cc48-45a9-4c0e-80f1-e8305911cccc","Type":"ContainerStarted","Data":"1deff9242524dc3a0e2d55f7c4f56aaf8b4514665831339c0e130be394d01fed"} Dec 01 07:05:33 crc kubenswrapper[4822]: I1201 07:05:33.120900 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-b25m8" event={"ID":"0bfbda8a-cb53-46e0-8deb-180e26af5e36","Type":"ContainerStarted","Data":"984c6155c982bfe623b692bf6d0e7a4f0f729cffba5dfa77a23dc0620c9051f6"} Dec 01 07:05:33 crc kubenswrapper[4822]: I1201 07:05:33.120929 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-b25m8" event={"ID":"0bfbda8a-cb53-46e0-8deb-180e26af5e36","Type":"ContainerStarted","Data":"3aeb4e360ed623390376329b278f3fa8434d304feb747917bcf5671a9481c118"} Dec 01 07:05:33 crc kubenswrapper[4822]: I1201 07:05:33.493727 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4acda3d2-5fc5-4827-bdab-6369a308f6aa-memberlist\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:33 crc kubenswrapper[4822]: I1201 07:05:33.510530 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4acda3d2-5fc5-4827-bdab-6369a308f6aa-memberlist\") pod \"speaker-glkvk\" (UID: \"4acda3d2-5fc5-4827-bdab-6369a308f6aa\") " pod="metallb-system/speaker-glkvk" Dec 01 07:05:33 crc kubenswrapper[4822]: I1201 07:05:33.536145 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-glkvk" Dec 01 07:05:33 crc kubenswrapper[4822]: W1201 07:05:33.555960 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4acda3d2_5fc5_4827_bdab_6369a308f6aa.slice/crio-661a54d18e4dd983953eb391c71f456db8c355d0621b1a80d59ba5003447e3bf WatchSource:0}: Error finding container 661a54d18e4dd983953eb391c71f456db8c355d0621b1a80d59ba5003447e3bf: Status 404 returned error can't find the container with id 661a54d18e4dd983953eb391c71f456db8c355d0621b1a80d59ba5003447e3bf Dec 01 07:05:34 crc kubenswrapper[4822]: I1201 07:05:34.151218 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-glkvk" event={"ID":"4acda3d2-5fc5-4827-bdab-6369a308f6aa","Type":"ContainerStarted","Data":"e1fb50e1c32c051cc8262c7f3a5facd37097b86060c3db172b60fc286c3f3328"} Dec 01 07:05:34 crc kubenswrapper[4822]: I1201 07:05:34.151969 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-glkvk" event={"ID":"4acda3d2-5fc5-4827-bdab-6369a308f6aa","Type":"ContainerStarted","Data":"661a54d18e4dd983953eb391c71f456db8c355d0621b1a80d59ba5003447e3bf"} Dec 01 07:05:34 crc kubenswrapper[4822]: I1201 07:05:34.168612 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-b25m8" event={"ID":"0bfbda8a-cb53-46e0-8deb-180e26af5e36","Type":"ContainerStarted","Data":"aaf97368646f4150310cf1fcc7f39c2033140c3ebf3675ccf81a60b3ff62f618"} Dec 01 07:05:34 crc kubenswrapper[4822]: I1201 07:05:34.169266 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:34 crc kubenswrapper[4822]: I1201 07:05:34.211804 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-b25m8" podStartSLOduration=3.211787486 podStartE2EDuration="3.211787486s" podCreationTimestamp="2025-12-01 07:05:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:05:34.208044751 +0000 UTC m=+889.528852437" watchObservedRunningTime="2025-12-01 07:05:34.211787486 +0000 UTC m=+889.532595172" Dec 01 07:05:35 crc kubenswrapper[4822]: I1201 07:05:35.179201 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-glkvk" event={"ID":"4acda3d2-5fc5-4827-bdab-6369a308f6aa","Type":"ContainerStarted","Data":"fb3b141127baafce455777a294aea93224bedb7ba2e03c54c4bcb6d77841cfd9"} Dec 01 07:05:35 crc kubenswrapper[4822]: I1201 07:05:35.204444 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-glkvk" podStartSLOduration=4.204418977 podStartE2EDuration="4.204418977s" podCreationTimestamp="2025-12-01 07:05:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:05:35.198950314 +0000 UTC m=+890.519758000" watchObservedRunningTime="2025-12-01 07:05:35.204418977 +0000 UTC m=+890.525226663" Dec 01 07:05:36 crc kubenswrapper[4822]: I1201 07:05:36.184070 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-glkvk" Dec 01 07:05:40 crc kubenswrapper[4822]: I1201 07:05:40.278673 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" 
event={"ID":"5824cc48-45a9-4c0e-80f1-e8305911cccc","Type":"ContainerStarted","Data":"75bb4f04d77a78c069b1b4ad67d5ad9b4cb3eb771bcf62b042ac28c962c668d0"} Dec 01 07:05:40 crc kubenswrapper[4822]: I1201 07:05:40.279637 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" Dec 01 07:05:40 crc kubenswrapper[4822]: I1201 07:05:40.282159 4822 generic.go:334] "Generic (PLEG): container finished" podID="59f52292-bc58-4afb-b290-6b1ab09a2187" containerID="c5570e6bed15b3dae5d54f9539fbf2b7fbdb6c1a9aea21ec2609d1fa2cdbfbc4" exitCode=0 Dec 01 07:05:40 crc kubenswrapper[4822]: I1201 07:05:40.282214 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6lntj" event={"ID":"59f52292-bc58-4afb-b290-6b1ab09a2187","Type":"ContainerDied","Data":"c5570e6bed15b3dae5d54f9539fbf2b7fbdb6c1a9aea21ec2609d1fa2cdbfbc4"} Dec 01 07:05:40 crc kubenswrapper[4822]: I1201 07:05:40.330184 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" podStartSLOduration=2.661650973 podStartE2EDuration="9.330153753s" podCreationTimestamp="2025-12-01 07:05:31 +0000 UTC" firstStartedPulling="2025-12-01 07:05:32.802381689 +0000 UTC m=+888.123189375" lastFinishedPulling="2025-12-01 07:05:39.470884459 +0000 UTC m=+894.791692155" observedRunningTime="2025-12-01 07:05:40.302109218 +0000 UTC m=+895.622916914" watchObservedRunningTime="2025-12-01 07:05:40.330153753 +0000 UTC m=+895.650961449" Dec 01 07:05:41 crc kubenswrapper[4822]: I1201 07:05:41.292782 4822 generic.go:334] "Generic (PLEG): container finished" podID="59f52292-bc58-4afb-b290-6b1ab09a2187" containerID="62be21f52834bf03edbd31f9d019618adbe3d2a0125e25d590c32abc7e808252" exitCode=0 Dec 01 07:05:41 crc kubenswrapper[4822]: I1201 07:05:41.292927 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6lntj" event={"ID":"59f52292-bc58-4afb-b290-6b1ab09a2187","Type":"ContainerDied","Data":"62be21f52834bf03edbd31f9d019618adbe3d2a0125e25d590c32abc7e808252"} Dec 01 07:05:42 crc kubenswrapper[4822]: I1201 07:05:42.300436 4822 generic.go:334] "Generic (PLEG): container finished" podID="59f52292-bc58-4afb-b290-6b1ab09a2187" containerID="1e083bed5c7c0715689cecbcf1419ebe696993a21639fe9998973ec6f3b9c52a" exitCode=0 Dec 01 07:05:42 crc kubenswrapper[4822]: I1201 07:05:42.300525 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6lntj" event={"ID":"59f52292-bc58-4afb-b290-6b1ab09a2187","Type":"ContainerDied","Data":"1e083bed5c7c0715689cecbcf1419ebe696993a21639fe9998973ec6f3b9c52a"} Dec 01 07:05:42 crc kubenswrapper[4822]: I1201 07:05:42.543205 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:05:42 crc kubenswrapper[4822]: I1201 07:05:42.543288 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:05:42 crc kubenswrapper[4822]: I1201 07:05:42.649598 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="metallb-system/controller-f8648f98b-b25m8" Dec 01 07:05:43 crc kubenswrapper[4822]: I1201 07:05:43.310766 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6lntj" event={"ID":"59f52292-bc58-4afb-b290-6b1ab09a2187","Type":"ContainerStarted","Data":"1ce511f844378c138820f87ad7e4c15f3498a399ea925f983ca1cf2da6fe9cd6"} Dec 01 07:05:43 crc kubenswrapper[4822]: I1201 07:05:43.311087 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6lntj" event={"ID":"59f52292-bc58-4afb-b290-6b1ab09a2187","Type":"ContainerStarted","Data":"a697e7747ee1c811f933432321d2c4b5369cf6508404347d856ff0af0c0af381"} Dec 01 07:05:43 crc kubenswrapper[4822]: I1201 07:05:43.311097 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6lntj" event={"ID":"59f52292-bc58-4afb-b290-6b1ab09a2187","Type":"ContainerStarted","Data":"df17a6d90cac53119a84ad1ef7a90735569bf2b8d34b855811a0530ee9614f61"} Dec 01 07:05:43 crc kubenswrapper[4822]: I1201 07:05:43.541501 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-glkvk" Dec 01 07:05:44 crc kubenswrapper[4822]: I1201 07:05:44.323029 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6lntj" event={"ID":"59f52292-bc58-4afb-b290-6b1ab09a2187","Type":"ContainerStarted","Data":"def5d2b1d991cdcbad033580ee0c96b6a41d7e97851a866ba228ad5196617297"} Dec 01 07:05:44 crc kubenswrapper[4822]: I1201 07:05:44.323069 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6lntj" event={"ID":"59f52292-bc58-4afb-b290-6b1ab09a2187","Type":"ContainerStarted","Data":"822f9796627999822d2f97b07f1ad80844fc264a1837e4db5eeb1ad0af163182"} Dec 01 07:05:44 crc kubenswrapper[4822]: I1201 07:05:44.323079 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6lntj" event={"ID":"59f52292-bc58-4afb-b290-6b1ab09a2187","Type":"ContainerStarted","Data":"ea5d26aa2275adcb98d52d9787f621b488c4fcf98dc01ff3ea06c1b48da24ba2"} Dec 01 07:05:44 crc kubenswrapper[4822]: I1201 07:05:44.324141 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:44 crc kubenswrapper[4822]: I1201 07:05:44.356607 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-6lntj" podStartSLOduration=6.011590978 podStartE2EDuration="13.356586302s" podCreationTimestamp="2025-12-01 07:05:31 +0000 UTC" firstStartedPulling="2025-12-01 07:05:32.118356324 +0000 UTC m=+887.439164010" lastFinishedPulling="2025-12-01 07:05:39.463351638 +0000 UTC m=+894.784159334" observedRunningTime="2025-12-01 07:05:44.349902234 +0000 UTC m=+899.670709910" watchObservedRunningTime="2025-12-01 07:05:44.356586302 +0000 UTC m=+899.677393988" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.180285 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq"] Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.182118 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.185239 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.201477 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq"] Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.364795 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/238d02e4-3800-4fdd-8e17-2b4e0261eea8-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq\" (UID: \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.365119 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/238d02e4-3800-4fdd-8e17-2b4e0261eea8-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq\" (UID: \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.365199 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j9n5\" (UniqueName: \"kubernetes.io/projected/238d02e4-3800-4fdd-8e17-2b4e0261eea8-kube-api-access-4j9n5\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq\" (UID: \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.467365 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/238d02e4-3800-4fdd-8e17-2b4e0261eea8-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq\" (UID: \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.467476 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/238d02e4-3800-4fdd-8e17-2b4e0261eea8-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq\" (UID: \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.467505 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j9n5\" (UniqueName: \"kubernetes.io/projected/238d02e4-3800-4fdd-8e17-2b4e0261eea8-kube-api-access-4j9n5\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq\" (UID: \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.468280 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/238d02e4-3800-4fdd-8e17-2b4e0261eea8-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq\" (UID: \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.469947 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/238d02e4-3800-4fdd-8e17-2b4e0261eea8-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq\" (UID: \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.499737 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4j9n5\" (UniqueName: \"kubernetes.io/projected/238d02e4-3800-4fdd-8e17-2b4e0261eea8-kube-api-access-4j9n5\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq\" (UID: \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.500106 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:45 crc kubenswrapper[4822]: I1201 07:05:45.746001 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq"] Dec 01 07:05:45 crc kubenswrapper[4822]: W1201 07:05:45.753915 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod238d02e4_3800_4fdd_8e17_2b4e0261eea8.slice/crio-9e7364061354e59e10ef8b0ac8e9e4585e3354c6f5209a005e00e47393470f24 WatchSource:0}: Error finding container 9e7364061354e59e10ef8b0ac8e9e4585e3354c6f5209a005e00e47393470f24: Status 404 returned error can't find the container with id 9e7364061354e59e10ef8b0ac8e9e4585e3354c6f5209a005e00e47393470f24 Dec 01 07:05:46 crc kubenswrapper[4822]: I1201 07:05:46.342609 4822 generic.go:334] "Generic (PLEG): container finished" podID="238d02e4-3800-4fdd-8e17-2b4e0261eea8" containerID="e2fd6b1d32b37957e2945ac7a8d0f5e63bacfa2fbdba6f4c4949046c6aa531a6" exitCode=0 Dec 01 07:05:46 crc kubenswrapper[4822]: I1201 07:05:46.342714 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" event={"ID":"238d02e4-3800-4fdd-8e17-2b4e0261eea8","Type":"ContainerDied","Data":"e2fd6b1d32b37957e2945ac7a8d0f5e63bacfa2fbdba6f4c4949046c6aa531a6"} Dec 01 07:05:46 crc kubenswrapper[4822]: I1201 07:05:46.343111 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" event={"ID":"238d02e4-3800-4fdd-8e17-2b4e0261eea8","Type":"ContainerStarted","Data":"9e7364061354e59e10ef8b0ac8e9e4585e3354c6f5209a005e00e47393470f24"} Dec 01 07:05:46 crc kubenswrapper[4822]: I1201 07:05:46.967917 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:47 crc kubenswrapper[4822]: I1201 07:05:47.012752 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-6lntj" Dec 01 07:05:50 crc kubenswrapper[4822]: 
I1201 07:05:50.377958 4822 generic.go:334] "Generic (PLEG): container finished" podID="238d02e4-3800-4fdd-8e17-2b4e0261eea8" containerID="86f55f020630a452ca9a22415c582829c13f86d767259da6d6690afc6be4e869" exitCode=0 Dec 01 07:05:50 crc kubenswrapper[4822]: I1201 07:05:50.378080 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" event={"ID":"238d02e4-3800-4fdd-8e17-2b4e0261eea8","Type":"ContainerDied","Data":"86f55f020630a452ca9a22415c582829c13f86d767259da6d6690afc6be4e869"} Dec 01 07:05:51 crc kubenswrapper[4822]: I1201 07:05:51.390767 4822 generic.go:334] "Generic (PLEG): container finished" podID="238d02e4-3800-4fdd-8e17-2b4e0261eea8" containerID="e74003c205183298059d08bf9a0ab7549b24df3cf7ac95d3d4e6f9b9cb37ca41" exitCode=0 Dec 01 07:05:51 crc kubenswrapper[4822]: I1201 07:05:51.390829 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" event={"ID":"238d02e4-3800-4fdd-8e17-2b4e0261eea8","Type":"ContainerDied","Data":"e74003c205183298059d08bf9a0ab7549b24df3cf7ac95d3d4e6f9b9cb37ca41"} Dec 01 07:05:52 crc kubenswrapper[4822]: I1201 07:05:52.541503 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-k644t" Dec 01 07:05:52 crc kubenswrapper[4822]: I1201 07:05:52.695209 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:52 crc kubenswrapper[4822]: I1201 07:05:52.792916 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/238d02e4-3800-4fdd-8e17-2b4e0261eea8-bundle\") pod \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\" (UID: \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\") " Dec 01 07:05:52 crc kubenswrapper[4822]: I1201 07:05:52.793934 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4j9n5\" (UniqueName: \"kubernetes.io/projected/238d02e4-3800-4fdd-8e17-2b4e0261eea8-kube-api-access-4j9n5\") pod \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\" (UID: \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\") " Dec 01 07:05:52 crc kubenswrapper[4822]: I1201 07:05:52.794010 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/238d02e4-3800-4fdd-8e17-2b4e0261eea8-bundle" (OuterVolumeSpecName: "bundle") pod "238d02e4-3800-4fdd-8e17-2b4e0261eea8" (UID: "238d02e4-3800-4fdd-8e17-2b4e0261eea8"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:05:52 crc kubenswrapper[4822]: I1201 07:05:52.794138 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/238d02e4-3800-4fdd-8e17-2b4e0261eea8-util\") pod \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\" (UID: \"238d02e4-3800-4fdd-8e17-2b4e0261eea8\") " Dec 01 07:05:52 crc kubenswrapper[4822]: I1201 07:05:52.794838 4822 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/238d02e4-3800-4fdd-8e17-2b4e0261eea8-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:05:52 crc kubenswrapper[4822]: I1201 07:05:52.802043 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/238d02e4-3800-4fdd-8e17-2b4e0261eea8-kube-api-access-4j9n5" (OuterVolumeSpecName: "kube-api-access-4j9n5") pod "238d02e4-3800-4fdd-8e17-2b4e0261eea8" (UID: "238d02e4-3800-4fdd-8e17-2b4e0261eea8"). InnerVolumeSpecName "kube-api-access-4j9n5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:05:52 crc kubenswrapper[4822]: I1201 07:05:52.804155 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/238d02e4-3800-4fdd-8e17-2b4e0261eea8-util" (OuterVolumeSpecName: "util") pod "238d02e4-3800-4fdd-8e17-2b4e0261eea8" (UID: "238d02e4-3800-4fdd-8e17-2b4e0261eea8"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:05:52 crc kubenswrapper[4822]: I1201 07:05:52.896098 4822 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/238d02e4-3800-4fdd-8e17-2b4e0261eea8-util\") on node \"crc\" DevicePath \"\"" Dec 01 07:05:52 crc kubenswrapper[4822]: I1201 07:05:52.896145 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4j9n5\" (UniqueName: \"kubernetes.io/projected/238d02e4-3800-4fdd-8e17-2b4e0261eea8-kube-api-access-4j9n5\") on node \"crc\" DevicePath \"\"" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.332527 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dqc6c"] Dec 01 07:05:53 crc kubenswrapper[4822]: E1201 07:05:53.333166 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="238d02e4-3800-4fdd-8e17-2b4e0261eea8" containerName="util" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.333187 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="238d02e4-3800-4fdd-8e17-2b4e0261eea8" containerName="util" Dec 01 07:05:53 crc kubenswrapper[4822]: E1201 07:05:53.333205 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="238d02e4-3800-4fdd-8e17-2b4e0261eea8" containerName="pull" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.333214 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="238d02e4-3800-4fdd-8e17-2b4e0261eea8" containerName="pull" Dec 01 07:05:53 crc kubenswrapper[4822]: E1201 07:05:53.333251 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="238d02e4-3800-4fdd-8e17-2b4e0261eea8" containerName="extract" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.333259 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="238d02e4-3800-4fdd-8e17-2b4e0261eea8" containerName="extract" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.333348 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="238d02e4-3800-4fdd-8e17-2b4e0261eea8" containerName="extract" Dec 01 
07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.334106 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.349313 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dqc6c"] Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.404961 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed21605d-e557-41b5-96d8-25b5cd2062f4-catalog-content\") pod \"community-operators-dqc6c\" (UID: \"ed21605d-e557-41b5-96d8-25b5cd2062f4\") " pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.405090 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed21605d-e557-41b5-96d8-25b5cd2062f4-utilities\") pod \"community-operators-dqc6c\" (UID: \"ed21605d-e557-41b5-96d8-25b5cd2062f4\") " pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.405129 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x2h6\" (UniqueName: \"kubernetes.io/projected/ed21605d-e557-41b5-96d8-25b5cd2062f4-kube-api-access-2x2h6\") pod \"community-operators-dqc6c\" (UID: \"ed21605d-e557-41b5-96d8-25b5cd2062f4\") " pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.415347 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" event={"ID":"238d02e4-3800-4fdd-8e17-2b4e0261eea8","Type":"ContainerDied","Data":"9e7364061354e59e10ef8b0ac8e9e4585e3354c6f5209a005e00e47393470f24"} Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.415411 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e7364061354e59e10ef8b0ac8e9e4585e3354c6f5209a005e00e47393470f24" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.415469 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.506369 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed21605d-e557-41b5-96d8-25b5cd2062f4-utilities\") pod \"community-operators-dqc6c\" (UID: \"ed21605d-e557-41b5-96d8-25b5cd2062f4\") " pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.506433 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x2h6\" (UniqueName: \"kubernetes.io/projected/ed21605d-e557-41b5-96d8-25b5cd2062f4-kube-api-access-2x2h6\") pod \"community-operators-dqc6c\" (UID: \"ed21605d-e557-41b5-96d8-25b5cd2062f4\") " pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.506537 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed21605d-e557-41b5-96d8-25b5cd2062f4-catalog-content\") pod \"community-operators-dqc6c\" (UID: \"ed21605d-e557-41b5-96d8-25b5cd2062f4\") " pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.506977 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed21605d-e557-41b5-96d8-25b5cd2062f4-catalog-content\") pod \"community-operators-dqc6c\" (UID: \"ed21605d-e557-41b5-96d8-25b5cd2062f4\") " pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.507273 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed21605d-e557-41b5-96d8-25b5cd2062f4-utilities\") pod \"community-operators-dqc6c\" (UID: \"ed21605d-e557-41b5-96d8-25b5cd2062f4\") " pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.523238 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x2h6\" (UniqueName: \"kubernetes.io/projected/ed21605d-e557-41b5-96d8-25b5cd2062f4-kube-api-access-2x2h6\") pod \"community-operators-dqc6c\" (UID: \"ed21605d-e557-41b5-96d8-25b5cd2062f4\") " pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:05:53 crc kubenswrapper[4822]: I1201 07:05:53.651417 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:05:54 crc kubenswrapper[4822]: I1201 07:05:54.144316 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dqc6c"] Dec 01 07:05:54 crc kubenswrapper[4822]: I1201 07:05:54.424153 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqc6c" event={"ID":"ed21605d-e557-41b5-96d8-25b5cd2062f4","Type":"ContainerStarted","Data":"bae92791a5da7f0464b7449507c2d76456b5e2e7cafffde8c973329c2e7ca82f"} Dec 01 07:05:55 crc kubenswrapper[4822]: I1201 07:05:55.430969 4822 generic.go:334] "Generic (PLEG): container finished" podID="ed21605d-e557-41b5-96d8-25b5cd2062f4" containerID="70073743eb5c28067d6085f59dff79e39a6d5686821c6de3e327281f10c4830d" exitCode=0 Dec 01 07:05:55 crc kubenswrapper[4822]: I1201 07:05:55.431199 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqc6c" event={"ID":"ed21605d-e557-41b5-96d8-25b5cd2062f4","Type":"ContainerDied","Data":"70073743eb5c28067d6085f59dff79e39a6d5686821c6de3e327281f10c4830d"} Dec 01 07:05:56 crc kubenswrapper[4822]: I1201 07:05:56.439654 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqc6c" event={"ID":"ed21605d-e557-41b5-96d8-25b5cd2062f4","Type":"ContainerStarted","Data":"45e9011fa60ccd991069678ee73e4554ed9a780e58b06bef5e2a97e2f0361757"} Dec 01 07:05:57 crc kubenswrapper[4822]: I1201 07:05:57.448634 4822 generic.go:334] "Generic (PLEG): container finished" podID="ed21605d-e557-41b5-96d8-25b5cd2062f4" containerID="45e9011fa60ccd991069678ee73e4554ed9a780e58b06bef5e2a97e2f0361757" exitCode=0 Dec 01 07:05:57 crc kubenswrapper[4822]: I1201 07:05:57.448747 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqc6c" event={"ID":"ed21605d-e557-41b5-96d8-25b5cd2062f4","Type":"ContainerDied","Data":"45e9011fa60ccd991069678ee73e4554ed9a780e58b06bef5e2a97e2f0361757"} Dec 01 07:05:57 crc kubenswrapper[4822]: I1201 07:05:57.888427 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw"] Dec 01 07:05:57 crc kubenswrapper[4822]: I1201 07:05:57.889333 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw" Dec 01 07:05:57 crc kubenswrapper[4822]: I1201 07:05:57.891202 4822 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-md55x" Dec 01 07:05:57 crc kubenswrapper[4822]: I1201 07:05:57.892170 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Dec 01 07:05:57 crc kubenswrapper[4822]: I1201 07:05:57.892347 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Dec 01 07:05:57 crc kubenswrapper[4822]: I1201 07:05:57.908272 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw"] Dec 01 07:05:58 crc kubenswrapper[4822]: I1201 07:05:58.083027 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a86e9c07-4301-4a98-9e27-2bf487fda8e4-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-sp8vw\" (UID: \"a86e9c07-4301-4a98-9e27-2bf487fda8e4\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw" Dec 01 07:05:58 crc kubenswrapper[4822]: I1201 07:05:58.083115 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhmgc\" (UniqueName: \"kubernetes.io/projected/a86e9c07-4301-4a98-9e27-2bf487fda8e4-kube-api-access-jhmgc\") pod \"cert-manager-operator-controller-manager-64cf6dff88-sp8vw\" (UID: \"a86e9c07-4301-4a98-9e27-2bf487fda8e4\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw" Dec 01 07:05:58 crc kubenswrapper[4822]: I1201 07:05:58.184629 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a86e9c07-4301-4a98-9e27-2bf487fda8e4-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-sp8vw\" (UID: \"a86e9c07-4301-4a98-9e27-2bf487fda8e4\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw" Dec 01 07:05:58 crc kubenswrapper[4822]: I1201 07:05:58.184806 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhmgc\" (UniqueName: \"kubernetes.io/projected/a86e9c07-4301-4a98-9e27-2bf487fda8e4-kube-api-access-jhmgc\") pod \"cert-manager-operator-controller-manager-64cf6dff88-sp8vw\" (UID: \"a86e9c07-4301-4a98-9e27-2bf487fda8e4\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw" Dec 01 07:05:58 crc kubenswrapper[4822]: I1201 07:05:58.185201 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a86e9c07-4301-4a98-9e27-2bf487fda8e4-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-sp8vw\" (UID: \"a86e9c07-4301-4a98-9e27-2bf487fda8e4\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw" Dec 01 07:05:58 crc kubenswrapper[4822]: I1201 07:05:58.221184 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhmgc\" (UniqueName: \"kubernetes.io/projected/a86e9c07-4301-4a98-9e27-2bf487fda8e4-kube-api-access-jhmgc\") pod \"cert-manager-operator-controller-manager-64cf6dff88-sp8vw\" (UID: \"a86e9c07-4301-4a98-9e27-2bf487fda8e4\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw" Dec 01 07:05:58 crc kubenswrapper[4822]: I1201 07:05:58.456746 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqc6c" event={"ID":"ed21605d-e557-41b5-96d8-25b5cd2062f4","Type":"ContainerStarted","Data":"4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682"} Dec 01 07:05:58 crc kubenswrapper[4822]: I1201 07:05:58.477082 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dqc6c" podStartSLOduration=2.672156014 podStartE2EDuration="5.477054273s" podCreationTimestamp="2025-12-01 07:05:53 +0000 UTC" firstStartedPulling="2025-12-01 07:05:55.433805501 +0000 UTC m=+910.754613187" lastFinishedPulling="2025-12-01 07:05:58.23870376 +0000 UTC m=+913.559511446" observedRunningTime="2025-12-01 07:05:58.475327335 +0000 UTC m=+913.796135021" watchObservedRunningTime="2025-12-01 07:05:58.477054273 +0000 UTC m=+913.797861959" Dec 01 07:05:58 crc kubenswrapper[4822]: I1201 07:05:58.507707 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw" Dec 01 07:05:59 crc kubenswrapper[4822]: I1201 07:05:59.014474 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw"] Dec 01 07:05:59 crc kubenswrapper[4822]: W1201 07:05:59.026050 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda86e9c07_4301_4a98_9e27_2bf487fda8e4.slice/crio-fb54aeeed2e3d26baff7881f5ed9805d227cba1264ef4ba2f3e7e1c4bbc11dc8 WatchSource:0}: Error finding container fb54aeeed2e3d26baff7881f5ed9805d227cba1264ef4ba2f3e7e1c4bbc11dc8: Status 404 returned error can't find the container with id fb54aeeed2e3d26baff7881f5ed9805d227cba1264ef4ba2f3e7e1c4bbc11dc8 Dec 01 07:05:59 crc kubenswrapper[4822]: I1201 07:05:59.464176 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw" event={"ID":"a86e9c07-4301-4a98-9e27-2bf487fda8e4","Type":"ContainerStarted","Data":"fb54aeeed2e3d26baff7881f5ed9805d227cba1264ef4ba2f3e7e1c4bbc11dc8"} Dec 01 07:06:01 crc kubenswrapper[4822]: I1201 07:06:01.926303 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-6lntj" Dec 01 07:06:02 crc kubenswrapper[4822]: I1201 07:06:02.921228 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ff457"] Dec 01 07:06:02 crc kubenswrapper[4822]: I1201 07:06:02.922984 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:02 crc kubenswrapper[4822]: I1201 07:06:02.937667 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ff457"] Dec 01 07:06:02 crc kubenswrapper[4822]: I1201 07:06:02.966172 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-utilities\") pod \"redhat-marketplace-ff457\" (UID: \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\") " pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:02 crc kubenswrapper[4822]: I1201 07:06:02.966227 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl4m2\" (UniqueName: \"kubernetes.io/projected/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-kube-api-access-fl4m2\") pod \"redhat-marketplace-ff457\" (UID: \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\") " pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:02 crc kubenswrapper[4822]: I1201 07:06:02.966245 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-catalog-content\") pod \"redhat-marketplace-ff457\" (UID: \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\") " pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:03 crc kubenswrapper[4822]: I1201 07:06:03.067241 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-utilities\") pod \"redhat-marketplace-ff457\" (UID: \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\") " pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:03 crc kubenswrapper[4822]: I1201 07:06:03.067316 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl4m2\" (UniqueName: \"kubernetes.io/projected/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-kube-api-access-fl4m2\") pod \"redhat-marketplace-ff457\" (UID: \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\") " pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:03 crc kubenswrapper[4822]: I1201 07:06:03.067335 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-catalog-content\") pod \"redhat-marketplace-ff457\" (UID: \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\") " pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:03 crc kubenswrapper[4822]: I1201 07:06:03.067869 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-catalog-content\") pod \"redhat-marketplace-ff457\" (UID: \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\") " pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:03 crc kubenswrapper[4822]: I1201 07:06:03.068100 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-utilities\") pod \"redhat-marketplace-ff457\" (UID: \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\") " pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:03 crc kubenswrapper[4822]: I1201 07:06:03.094253 4822 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-fl4m2\" (UniqueName: \"kubernetes.io/projected/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-kube-api-access-fl4m2\") pod \"redhat-marketplace-ff457\" (UID: \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\") " pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:03 crc kubenswrapper[4822]: I1201 07:06:03.251217 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:03 crc kubenswrapper[4822]: I1201 07:06:03.652183 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:06:03 crc kubenswrapper[4822]: I1201 07:06:03.652288 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:06:03 crc kubenswrapper[4822]: I1201 07:06:03.708405 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:06:04 crc kubenswrapper[4822]: I1201 07:06:04.585693 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:06:07 crc kubenswrapper[4822]: I1201 07:06:07.314780 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dqc6c"] Dec 01 07:06:07 crc kubenswrapper[4822]: I1201 07:06:07.522283 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dqc6c" podUID="ed21605d-e557-41b5-96d8-25b5cd2062f4" containerName="registry-server" containerID="cri-o://4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682" gracePeriod=2 Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.173147 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.271097 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ff457"] Dec 01 07:06:08 crc kubenswrapper[4822]: W1201 07:06:08.283370 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d2ced39_0942_4a2f_bf56_d745be6c5ecb.slice/crio-7a0acb65d4bf020dada726a5d874e78345dc0ad6efc549f7a7c1e1838e78cdef WatchSource:0}: Error finding container 7a0acb65d4bf020dada726a5d874e78345dc0ad6efc549f7a7c1e1838e78cdef: Status 404 returned error can't find the container with id 7a0acb65d4bf020dada726a5d874e78345dc0ad6efc549f7a7c1e1838e78cdef Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.374565 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed21605d-e557-41b5-96d8-25b5cd2062f4-utilities\") pod \"ed21605d-e557-41b5-96d8-25b5cd2062f4\" (UID: \"ed21605d-e557-41b5-96d8-25b5cd2062f4\") " Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.374710 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed21605d-e557-41b5-96d8-25b5cd2062f4-catalog-content\") pod \"ed21605d-e557-41b5-96d8-25b5cd2062f4\" (UID: \"ed21605d-e557-41b5-96d8-25b5cd2062f4\") " Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.375645 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed21605d-e557-41b5-96d8-25b5cd2062f4-utilities" (OuterVolumeSpecName: "utilities") pod "ed21605d-e557-41b5-96d8-25b5cd2062f4" (UID: "ed21605d-e557-41b5-96d8-25b5cd2062f4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.378747 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x2h6\" (UniqueName: \"kubernetes.io/projected/ed21605d-e557-41b5-96d8-25b5cd2062f4-kube-api-access-2x2h6\") pod \"ed21605d-e557-41b5-96d8-25b5cd2062f4\" (UID: \"ed21605d-e557-41b5-96d8-25b5cd2062f4\") " Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.379179 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed21605d-e557-41b5-96d8-25b5cd2062f4-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.390920 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed21605d-e557-41b5-96d8-25b5cd2062f4-kube-api-access-2x2h6" (OuterVolumeSpecName: "kube-api-access-2x2h6") pod "ed21605d-e557-41b5-96d8-25b5cd2062f4" (UID: "ed21605d-e557-41b5-96d8-25b5cd2062f4"). InnerVolumeSpecName "kube-api-access-2x2h6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.425177 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed21605d-e557-41b5-96d8-25b5cd2062f4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed21605d-e557-41b5-96d8-25b5cd2062f4" (UID: "ed21605d-e557-41b5-96d8-25b5cd2062f4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.480356 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x2h6\" (UniqueName: \"kubernetes.io/projected/ed21605d-e557-41b5-96d8-25b5cd2062f4-kube-api-access-2x2h6\") on node \"crc\" DevicePath \"\"" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.480662 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed21605d-e557-41b5-96d8-25b5cd2062f4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.529910 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ff457" event={"ID":"9d2ced39-0942-4a2f-bf56-d745be6c5ecb","Type":"ContainerStarted","Data":"7a0acb65d4bf020dada726a5d874e78345dc0ad6efc549f7a7c1e1838e78cdef"} Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.531934 4822 generic.go:334] "Generic (PLEG): container finished" podID="ed21605d-e557-41b5-96d8-25b5cd2062f4" containerID="4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682" exitCode=0 Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.531970 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqc6c" event={"ID":"ed21605d-e557-41b5-96d8-25b5cd2062f4","Type":"ContainerDied","Data":"4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682"} Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.532001 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqc6c" event={"ID":"ed21605d-e557-41b5-96d8-25b5cd2062f4","Type":"ContainerDied","Data":"bae92791a5da7f0464b7449507c2d76456b5e2e7cafffde8c973329c2e7ca82f"} Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.532030 4822 scope.go:117] "RemoveContainer" containerID="4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.532195 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dqc6c" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.575424 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dqc6c"] Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.580608 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dqc6c"] Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.583221 4822 scope.go:117] "RemoveContainer" containerID="45e9011fa60ccd991069678ee73e4554ed9a780e58b06bef5e2a97e2f0361757" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.602842 4822 scope.go:117] "RemoveContainer" containerID="70073743eb5c28067d6085f59dff79e39a6d5686821c6de3e327281f10c4830d" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.627313 4822 scope.go:117] "RemoveContainer" containerID="4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682" Dec 01 07:06:08 crc kubenswrapper[4822]: E1201 07:06:08.628013 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682\": container with ID starting with 4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682 not found: ID does not exist" containerID="4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.628050 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682"} err="failed to get container status \"4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682\": rpc error: code = NotFound desc = could not find container \"4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682\": container with ID starting with 4457f1e2221ae4d095dc5655e2c7c01c61a71b259ecb2ab739bdf47bc3c43682 not found: ID does not exist" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.628077 4822 scope.go:117] "RemoveContainer" containerID="45e9011fa60ccd991069678ee73e4554ed9a780e58b06bef5e2a97e2f0361757" Dec 01 07:06:08 crc kubenswrapper[4822]: E1201 07:06:08.628419 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45e9011fa60ccd991069678ee73e4554ed9a780e58b06bef5e2a97e2f0361757\": container with ID starting with 45e9011fa60ccd991069678ee73e4554ed9a780e58b06bef5e2a97e2f0361757 not found: ID does not exist" containerID="45e9011fa60ccd991069678ee73e4554ed9a780e58b06bef5e2a97e2f0361757" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.628450 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45e9011fa60ccd991069678ee73e4554ed9a780e58b06bef5e2a97e2f0361757"} err="failed to get container status \"45e9011fa60ccd991069678ee73e4554ed9a780e58b06bef5e2a97e2f0361757\": rpc error: code = NotFound desc = could not find container \"45e9011fa60ccd991069678ee73e4554ed9a780e58b06bef5e2a97e2f0361757\": container with ID starting with 45e9011fa60ccd991069678ee73e4554ed9a780e58b06bef5e2a97e2f0361757 not found: ID does not exist" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.628468 4822 scope.go:117] "RemoveContainer" containerID="70073743eb5c28067d6085f59dff79e39a6d5686821c6de3e327281f10c4830d" Dec 01 07:06:08 crc kubenswrapper[4822]: E1201 07:06:08.628712 4822 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"70073743eb5c28067d6085f59dff79e39a6d5686821c6de3e327281f10c4830d\": container with ID starting with 70073743eb5c28067d6085f59dff79e39a6d5686821c6de3e327281f10c4830d not found: ID does not exist" containerID="70073743eb5c28067d6085f59dff79e39a6d5686821c6de3e327281f10c4830d" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.628739 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70073743eb5c28067d6085f59dff79e39a6d5686821c6de3e327281f10c4830d"} err="failed to get container status \"70073743eb5c28067d6085f59dff79e39a6d5686821c6de3e327281f10c4830d\": rpc error: code = NotFound desc = could not find container \"70073743eb5c28067d6085f59dff79e39a6d5686821c6de3e327281f10c4830d\": container with ID starting with 70073743eb5c28067d6085f59dff79e39a6d5686821c6de3e327281f10c4830d not found: ID does not exist" Dec 01 07:06:08 crc kubenswrapper[4822]: I1201 07:06:08.959353 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed21605d-e557-41b5-96d8-25b5cd2062f4" path="/var/lib/kubelet/pods/ed21605d-e557-41b5-96d8-25b5cd2062f4/volumes" Dec 01 07:06:09 crc kubenswrapper[4822]: I1201 07:06:09.543541 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw" event={"ID":"a86e9c07-4301-4a98-9e27-2bf487fda8e4","Type":"ContainerStarted","Data":"c0f531b0d9520fedcbfef6d9c91e80dc2aec62fe99381fd70bfbbeb95766840d"} Dec 01 07:06:09 crc kubenswrapper[4822]: I1201 07:06:09.559884 4822 generic.go:334] "Generic (PLEG): container finished" podID="9d2ced39-0942-4a2f-bf56-d745be6c5ecb" containerID="563c8941c9293feb0e0de6394347b422e4615b15d227c79d3186ce100d7b0444" exitCode=0 Dec 01 07:06:09 crc kubenswrapper[4822]: I1201 07:06:09.559953 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ff457" event={"ID":"9d2ced39-0942-4a2f-bf56-d745be6c5ecb","Type":"ContainerDied","Data":"563c8941c9293feb0e0de6394347b422e4615b15d227c79d3186ce100d7b0444"} Dec 01 07:06:09 crc kubenswrapper[4822]: I1201 07:06:09.584272 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sp8vw" podStartSLOduration=3.723702159 podStartE2EDuration="12.584241246s" podCreationTimestamp="2025-12-01 07:05:57 +0000 UTC" firstStartedPulling="2025-12-01 07:05:59.029073862 +0000 UTC m=+914.349881548" lastFinishedPulling="2025-12-01 07:06:07.889612949 +0000 UTC m=+923.210420635" observedRunningTime="2025-12-01 07:06:09.580122941 +0000 UTC m=+924.900930637" watchObservedRunningTime="2025-12-01 07:06:09.584241246 +0000 UTC m=+924.905048972" Dec 01 07:06:10 crc kubenswrapper[4822]: I1201 07:06:10.567611 4822 generic.go:334] "Generic (PLEG): container finished" podID="9d2ced39-0942-4a2f-bf56-d745be6c5ecb" containerID="d66014c789f0fe0f932c4bdb166f58e50e3124d0c12c5dd078e9725340fd97ab" exitCode=0 Dec 01 07:06:10 crc kubenswrapper[4822]: I1201 07:06:10.567702 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ff457" event={"ID":"9d2ced39-0942-4a2f-bf56-d745be6c5ecb","Type":"ContainerDied","Data":"d66014c789f0fe0f932c4bdb166f58e50e3124d0c12c5dd078e9725340fd97ab"} Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.574004 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-ph822"] Dec 01 07:06:11 crc 
kubenswrapper[4822]: E1201 07:06:11.574691 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed21605d-e557-41b5-96d8-25b5cd2062f4" containerName="registry-server" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.574706 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed21605d-e557-41b5-96d8-25b5cd2062f4" containerName="registry-server" Dec 01 07:06:11 crc kubenswrapper[4822]: E1201 07:06:11.574718 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed21605d-e557-41b5-96d8-25b5cd2062f4" containerName="extract-content" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.574724 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed21605d-e557-41b5-96d8-25b5cd2062f4" containerName="extract-content" Dec 01 07:06:11 crc kubenswrapper[4822]: E1201 07:06:11.574737 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed21605d-e557-41b5-96d8-25b5cd2062f4" containerName="extract-utilities" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.574743 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed21605d-e557-41b5-96d8-25b5cd2062f4" containerName="extract-utilities" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.574847 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed21605d-e557-41b5-96d8-25b5cd2062f4" containerName="registry-server" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.575228 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.577182 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ff457" event={"ID":"9d2ced39-0942-4a2f-bf56-d745be6c5ecb","Type":"ContainerStarted","Data":"baf73efc51a8b73ef1eb0c2aa104f94245fda65e859edc30cd3dd28c5ca2c325"} Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.578986 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.579498 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.579939 4822 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-5n4dj" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.601567 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-ph822"] Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.629090 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7fb450da-1208-4fac-a91a-a83d60cbae08-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-ph822\" (UID: \"7fb450da-1208-4fac-a91a-a83d60cbae08\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.629175 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6twn\" (UniqueName: \"kubernetes.io/projected/7fb450da-1208-4fac-a91a-a83d60cbae08-kube-api-access-c6twn\") pod \"cert-manager-webhook-f4fb5df64-ph822\" (UID: \"7fb450da-1208-4fac-a91a-a83d60cbae08\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.638386 4822 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ff457" podStartSLOduration=8.23886143 podStartE2EDuration="9.638355582s" podCreationTimestamp="2025-12-01 07:06:02 +0000 UTC" firstStartedPulling="2025-12-01 07:06:09.563668279 +0000 UTC m=+924.884475965" lastFinishedPulling="2025-12-01 07:06:10.963162421 +0000 UTC m=+926.283970117" observedRunningTime="2025-12-01 07:06:11.631680765 +0000 UTC m=+926.952488451" watchObservedRunningTime="2025-12-01 07:06:11.638355582 +0000 UTC m=+926.959163268" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.730465 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7fb450da-1208-4fac-a91a-a83d60cbae08-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-ph822\" (UID: \"7fb450da-1208-4fac-a91a-a83d60cbae08\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.730566 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6twn\" (UniqueName: \"kubernetes.io/projected/7fb450da-1208-4fac-a91a-a83d60cbae08-kube-api-access-c6twn\") pod \"cert-manager-webhook-f4fb5df64-ph822\" (UID: \"7fb450da-1208-4fac-a91a-a83d60cbae08\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.756588 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7fb450da-1208-4fac-a91a-a83d60cbae08-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-ph822\" (UID: \"7fb450da-1208-4fac-a91a-a83d60cbae08\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.770518 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6twn\" (UniqueName: \"kubernetes.io/projected/7fb450da-1208-4fac-a91a-a83d60cbae08-kube-api-access-c6twn\") pod \"cert-manager-webhook-f4fb5df64-ph822\" (UID: \"7fb450da-1208-4fac-a91a-a83d60cbae08\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822" Dec 01 07:06:11 crc kubenswrapper[4822]: I1201 07:06:11.893682 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822" Dec 01 07:06:12 crc kubenswrapper[4822]: I1201 07:06:12.311166 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-ph822"] Dec 01 07:06:12 crc kubenswrapper[4822]: I1201 07:06:12.543673 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:06:12 crc kubenswrapper[4822]: I1201 07:06:12.543749 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:06:12 crc kubenswrapper[4822]: I1201 07:06:12.583131 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822" event={"ID":"7fb450da-1208-4fac-a91a-a83d60cbae08","Type":"ContainerStarted","Data":"3738e21d9c203cd013ca4f903f20d5c43e312b779be650b0e353ce09b0a8b98e"} Dec 01 07:06:13 crc kubenswrapper[4822]: I1201 07:06:13.251928 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:13 crc kubenswrapper[4822]: I1201 07:06:13.252389 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:13 crc kubenswrapper[4822]: I1201 07:06:13.332103 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ff457" Dec 01 07:06:14 crc kubenswrapper[4822]: I1201 07:06:14.698581 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj"] Dec 01 07:06:14 crc kubenswrapper[4822]: I1201 07:06:14.700096 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj"
Dec 01 07:06:14 crc kubenswrapper[4822]: I1201 07:06:14.702442 4822 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-986xp"
Dec 01 07:06:14 crc kubenswrapper[4822]: I1201 07:06:14.711359 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj"]
Dec 01 07:06:14 crc kubenswrapper[4822]: I1201 07:06:14.819621 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0b41c100-758b-41ab-8a96-f46b3be4a01c-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-j2jgj\" (UID: \"0b41c100-758b-41ab-8a96-f46b3be4a01c\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj"
Dec 01 07:06:14 crc kubenswrapper[4822]: I1201 07:06:14.819959 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9wqd\" (UniqueName: \"kubernetes.io/projected/0b41c100-758b-41ab-8a96-f46b3be4a01c-kube-api-access-z9wqd\") pod \"cert-manager-cainjector-855d9ccff4-j2jgj\" (UID: \"0b41c100-758b-41ab-8a96-f46b3be4a01c\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj"
Dec 01 07:06:14 crc kubenswrapper[4822]: I1201 07:06:14.921782 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9wqd\" (UniqueName: \"kubernetes.io/projected/0b41c100-758b-41ab-8a96-f46b3be4a01c-kube-api-access-z9wqd\") pod \"cert-manager-cainjector-855d9ccff4-j2jgj\" (UID: \"0b41c100-758b-41ab-8a96-f46b3be4a01c\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj"
Dec 01 07:06:14 crc kubenswrapper[4822]: I1201 07:06:14.921861 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0b41c100-758b-41ab-8a96-f46b3be4a01c-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-j2jgj\" (UID: \"0b41c100-758b-41ab-8a96-f46b3be4a01c\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj"
Dec 01 07:06:14 crc kubenswrapper[4822]: I1201 07:06:14.956224 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9wqd\" (UniqueName: \"kubernetes.io/projected/0b41c100-758b-41ab-8a96-f46b3be4a01c-kube-api-access-z9wqd\") pod \"cert-manager-cainjector-855d9ccff4-j2jgj\" (UID: \"0b41c100-758b-41ab-8a96-f46b3be4a01c\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj"
Dec 01 07:06:14 crc kubenswrapper[4822]: I1201 07:06:14.961620 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0b41c100-758b-41ab-8a96-f46b3be4a01c-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-j2jgj\" (UID: \"0b41c100-758b-41ab-8a96-f46b3be4a01c\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj"
Dec 01 07:06:15 crc kubenswrapper[4822]: I1201 07:06:15.026992 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj"
Dec 01 07:06:15 crc kubenswrapper[4822]: I1201 07:06:15.451896 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj"]
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.170173 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-r5nnn"]
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.172057 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.193216 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r5nnn"]
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.271341 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81d57209-c2f8-48a0-9ce9-b45972e743f8-utilities\") pod \"certified-operators-r5nnn\" (UID: \"81d57209-c2f8-48a0-9ce9-b45972e743f8\") " pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.271423 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81d57209-c2f8-48a0-9ce9-b45972e743f8-catalog-content\") pod \"certified-operators-r5nnn\" (UID: \"81d57209-c2f8-48a0-9ce9-b45972e743f8\") " pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.271469 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv5cj\" (UniqueName: \"kubernetes.io/projected/81d57209-c2f8-48a0-9ce9-b45972e743f8-kube-api-access-fv5cj\") pod \"certified-operators-r5nnn\" (UID: \"81d57209-c2f8-48a0-9ce9-b45972e743f8\") " pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.372352 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv5cj\" (UniqueName: \"kubernetes.io/projected/81d57209-c2f8-48a0-9ce9-b45972e743f8-kube-api-access-fv5cj\") pod \"certified-operators-r5nnn\" (UID: \"81d57209-c2f8-48a0-9ce9-b45972e743f8\") " pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.372429 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81d57209-c2f8-48a0-9ce9-b45972e743f8-utilities\") pod \"certified-operators-r5nnn\" (UID: \"81d57209-c2f8-48a0-9ce9-b45972e743f8\") " pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.372461 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81d57209-c2f8-48a0-9ce9-b45972e743f8-catalog-content\") pod \"certified-operators-r5nnn\" (UID: \"81d57209-c2f8-48a0-9ce9-b45972e743f8\") " pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.372924 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81d57209-c2f8-48a0-9ce9-b45972e743f8-catalog-content\") pod \"certified-operators-r5nnn\" (UID: \"81d57209-c2f8-48a0-9ce9-b45972e743f8\") " pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.373079 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81d57209-c2f8-48a0-9ce9-b45972e743f8-utilities\") pod \"certified-operators-r5nnn\" (UID: \"81d57209-c2f8-48a0-9ce9-b45972e743f8\") " pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.395666 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv5cj\" (UniqueName: \"kubernetes.io/projected/81d57209-c2f8-48a0-9ce9-b45972e743f8-kube-api-access-fv5cj\") pod \"certified-operators-r5nnn\" (UID: \"81d57209-c2f8-48a0-9ce9-b45972e743f8\") " pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:18 crc kubenswrapper[4822]: I1201 07:06:18.509616 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:22 crc kubenswrapper[4822]: I1201 07:06:22.105514 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r5nnn"]
Dec 01 07:06:22 crc kubenswrapper[4822]: W1201 07:06:22.206652 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81d57209_c2f8_48a0_9ce9_b45972e743f8.slice/crio-8c2c538c83d8fe6e1580511ffc80200c66c4285fd05c3684ab0b782280bf8ebf WatchSource:0}: Error finding container 8c2c538c83d8fe6e1580511ffc80200c66c4285fd05c3684ab0b782280bf8ebf: Status 404 returned error can't find the container with id 8c2c538c83d8fe6e1580511ffc80200c66c4285fd05c3684ab0b782280bf8ebf
Dec 01 07:06:22 crc kubenswrapper[4822]: I1201 07:06:22.660217 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj" event={"ID":"0b41c100-758b-41ab-8a96-f46b3be4a01c","Type":"ContainerStarted","Data":"2ddffdd8645150cd20de40abfeb24372a0e4f41e5b43952568b36f2548a318a7"}
Dec 01 07:06:22 crc kubenswrapper[4822]: I1201 07:06:22.660254 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj" event={"ID":"0b41c100-758b-41ab-8a96-f46b3be4a01c","Type":"ContainerStarted","Data":"917fdd965e8208487f8a132407e0e5647f540790e09b61ab974cb94d3ea842b3"}
Dec 01 07:06:22 crc kubenswrapper[4822]: I1201 07:06:22.661895 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822" event={"ID":"7fb450da-1208-4fac-a91a-a83d60cbae08","Type":"ContainerStarted","Data":"b28c86352f2e4c086151891b60a4b67d178091cb0de48c98ee3c5aa726a1721f"}
Dec 01 07:06:22 crc kubenswrapper[4822]: I1201 07:06:22.662025 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822"
Dec 01 07:06:22 crc kubenswrapper[4822]: I1201 07:06:22.663888 4822 generic.go:334] "Generic (PLEG): container finished" podID="81d57209-c2f8-48a0-9ce9-b45972e743f8" containerID="000068f91c420e257668651accd7a1edaf5f3acf364b91b8c2c2a52f783f6345" exitCode=0
Dec 01 07:06:22 crc kubenswrapper[4822]: I1201 07:06:22.663918 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r5nnn" event={"ID":"81d57209-c2f8-48a0-9ce9-b45972e743f8","Type":"ContainerDied","Data":"000068f91c420e257668651accd7a1edaf5f3acf364b91b8c2c2a52f783f6345"}
Dec 01 07:06:22 crc kubenswrapper[4822]: I1201 07:06:22.663966 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r5nnn" event={"ID":"81d57209-c2f8-48a0-9ce9-b45972e743f8","Type":"ContainerStarted","Data":"8c2c538c83d8fe6e1580511ffc80200c66c4285fd05c3684ab0b782280bf8ebf"}
Dec 01 07:06:22 crc kubenswrapper[4822]: I1201 07:06:22.687390 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-j2jgj" podStartSLOduration=7.881759906 podStartE2EDuration="8.687369694s" podCreationTimestamp="2025-12-01 07:06:14 +0000 UTC" firstStartedPulling="2025-12-01 07:06:21.66258437 +0000 UTC m=+936.983392066" lastFinishedPulling="2025-12-01 07:06:22.468194158 +0000 UTC m=+937.789001854" observedRunningTime="2025-12-01 07:06:22.681327734 +0000 UTC m=+938.002135450" watchObservedRunningTime="2025-12-01 07:06:22.687369694 +0000 UTC m=+938.008177400"
Dec 01 07:06:22 crc kubenswrapper[4822]: I1201 07:06:22.701295 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822" podStartSLOduration=2.262374309 podStartE2EDuration="11.701274584s" podCreationTimestamp="2025-12-01 07:06:11 +0000 UTC" firstStartedPulling="2025-12-01 07:06:12.326726694 +0000 UTC m=+927.647534380" lastFinishedPulling="2025-12-01 07:06:21.765626949 +0000 UTC m=+937.086434655" observedRunningTime="2025-12-01 07:06:22.699478303 +0000 UTC m=+938.020286029" watchObservedRunningTime="2025-12-01 07:06:22.701274584 +0000 UTC m=+938.022082300"
Dec 01 07:06:23 crc kubenswrapper[4822]: I1201 07:06:23.365912 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ff457"
Dec 01 07:06:23 crc kubenswrapper[4822]: I1201 07:06:23.544957 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ff457"]
Dec 01 07:06:23 crc kubenswrapper[4822]: I1201 07:06:23.673424 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ff457" podUID="9d2ced39-0942-4a2f-bf56-d745be6c5ecb" containerName="registry-server" containerID="cri-o://baf73efc51a8b73ef1eb0c2aa104f94245fda65e859edc30cd3dd28c5ca2c325" gracePeriod=2
Dec 01 07:06:24 crc kubenswrapper[4822]: I1201 07:06:24.681067 4822 generic.go:334] "Generic (PLEG): container finished" podID="81d57209-c2f8-48a0-9ce9-b45972e743f8" containerID="8649b7b5e950f72bf47f69d76b227e896d0b014fda18e7ad7f80ce13c14b0be2" exitCode=0
Dec 01 07:06:24 crc kubenswrapper[4822]: I1201 07:06:24.681145 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r5nnn" event={"ID":"81d57209-c2f8-48a0-9ce9-b45972e743f8","Type":"ContainerDied","Data":"8649b7b5e950f72bf47f69d76b227e896d0b014fda18e7ad7f80ce13c14b0be2"}
Dec 01 07:06:24 crc kubenswrapper[4822]: I1201 07:06:24.684822 4822 generic.go:334] "Generic (PLEG): container finished" podID="9d2ced39-0942-4a2f-bf56-d745be6c5ecb" containerID="baf73efc51a8b73ef1eb0c2aa104f94245fda65e859edc30cd3dd28c5ca2c325" exitCode=0
Dec 01 07:06:24 crc kubenswrapper[4822]: I1201 07:06:24.684860 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ff457" event={"ID":"9d2ced39-0942-4a2f-bf56-d745be6c5ecb","Type":"ContainerDied","Data":"baf73efc51a8b73ef1eb0c2aa104f94245fda65e859edc30cd3dd28c5ca2c325"}
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.220220 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ff457"
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.301448 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-catalog-content\") pod \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\" (UID: \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\") "
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.301545 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fl4m2\" (UniqueName: \"kubernetes.io/projected/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-kube-api-access-fl4m2\") pod \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\" (UID: \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\") "
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.301589 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-utilities\") pod \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\" (UID: \"9d2ced39-0942-4a2f-bf56-d745be6c5ecb\") "
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.302783 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-utilities" (OuterVolumeSpecName: "utilities") pod "9d2ced39-0942-4a2f-bf56-d745be6c5ecb" (UID: "9d2ced39-0942-4a2f-bf56-d745be6c5ecb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.309470 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-kube-api-access-fl4m2" (OuterVolumeSpecName: "kube-api-access-fl4m2") pod "9d2ced39-0942-4a2f-bf56-d745be6c5ecb" (UID: "9d2ced39-0942-4a2f-bf56-d745be6c5ecb"). InnerVolumeSpecName "kube-api-access-fl4m2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.320290 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9d2ced39-0942-4a2f-bf56-d745be6c5ecb" (UID: "9d2ced39-0942-4a2f-bf56-d745be6c5ecb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.403030 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fl4m2\" (UniqueName: \"kubernetes.io/projected/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-kube-api-access-fl4m2\") on node \"crc\" DevicePath \"\""
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.403065 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.403074 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d2ced39-0942-4a2f-bf56-d745be6c5ecb-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.700251 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ff457" event={"ID":"9d2ced39-0942-4a2f-bf56-d745be6c5ecb","Type":"ContainerDied","Data":"7a0acb65d4bf020dada726a5d874e78345dc0ad6efc549f7a7c1e1838e78cdef"}
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.700280 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ff457"
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.700317 4822 scope.go:117] "RemoveContainer" containerID="baf73efc51a8b73ef1eb0c2aa104f94245fda65e859edc30cd3dd28c5ca2c325"
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.703294 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r5nnn" event={"ID":"81d57209-c2f8-48a0-9ce9-b45972e743f8","Type":"ContainerStarted","Data":"220f987cbf462aa36f867b793c48e801b091c2b27c7c629ea36d9ecd083354d5"}
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.716598 4822 scope.go:117] "RemoveContainer" containerID="d66014c789f0fe0f932c4bdb166f58e50e3124d0c12c5dd078e9725340fd97ab"
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.729110 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-r5nnn" podStartSLOduration=5.60841103 podStartE2EDuration="8.729090893s" podCreationTimestamp="2025-12-01 07:06:18 +0000 UTC" firstStartedPulling="2025-12-01 07:06:22.668287728 +0000 UTC m=+937.989095444" lastFinishedPulling="2025-12-01 07:06:25.788967621 +0000 UTC m=+941.109775307" observedRunningTime="2025-12-01 07:06:26.728753173 +0000 UTC m=+942.049560859" watchObservedRunningTime="2025-12-01 07:06:26.729090893 +0000 UTC m=+942.049898579"
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.749570 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ff457"]
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.751893 4822 scope.go:117] "RemoveContainer" containerID="563c8941c9293feb0e0de6394347b422e4615b15d227c79d3186ce100d7b0444"
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.755287 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ff457"]
Dec 01 07:06:26 crc kubenswrapper[4822]: I1201 07:06:26.963742 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d2ced39-0942-4a2f-bf56-d745be6c5ecb" path="/var/lib/kubelet/pods/9d2ced39-0942-4a2f-bf56-d745be6c5ecb/volumes"
Dec 01 07:06:28 crc kubenswrapper[4822]: I1201 07:06:28.510367 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:28 crc kubenswrapper[4822]: I1201 07:06:28.510709 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:28 crc kubenswrapper[4822]: I1201 07:06:28.572671 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.507935 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-2zlrk"]
Dec 01 07:06:30 crc kubenswrapper[4822]: E1201 07:06:30.510691 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d2ced39-0942-4a2f-bf56-d745be6c5ecb" containerName="extract-content"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.510791 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d2ced39-0942-4a2f-bf56-d745be6c5ecb" containerName="extract-content"
Dec 01 07:06:30 crc kubenswrapper[4822]: E1201 07:06:30.510874 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d2ced39-0942-4a2f-bf56-d745be6c5ecb" containerName="registry-server"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.510968 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d2ced39-0942-4a2f-bf56-d745be6c5ecb" containerName="registry-server"
Dec 01 07:06:30 crc kubenswrapper[4822]: E1201 07:06:30.511057 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d2ced39-0942-4a2f-bf56-d745be6c5ecb" containerName="extract-utilities"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.511118 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d2ced39-0942-4a2f-bf56-d745be6c5ecb" containerName="extract-utilities"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.511346 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d2ced39-0942-4a2f-bf56-d745be6c5ecb" containerName="registry-server"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.512133 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-2zlrk"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.516449 4822 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-6zcqq"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.522907 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-2zlrk"]
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.575255 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbt5n\" (UniqueName: \"kubernetes.io/projected/99f4689f-66e4-4b45-94e3-55791c3a186b-kube-api-access-rbt5n\") pod \"cert-manager-86cb77c54b-2zlrk\" (UID: \"99f4689f-66e4-4b45-94e3-55791c3a186b\") " pod="cert-manager/cert-manager-86cb77c54b-2zlrk"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.575715 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/99f4689f-66e4-4b45-94e3-55791c3a186b-bound-sa-token\") pod \"cert-manager-86cb77c54b-2zlrk\" (UID: \"99f4689f-66e4-4b45-94e3-55791c3a186b\") " pod="cert-manager/cert-manager-86cb77c54b-2zlrk"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.677881 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/99f4689f-66e4-4b45-94e3-55791c3a186b-bound-sa-token\") pod \"cert-manager-86cb77c54b-2zlrk\" (UID: \"99f4689f-66e4-4b45-94e3-55791c3a186b\") " pod="cert-manager/cert-manager-86cb77c54b-2zlrk"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.677969 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbt5n\" (UniqueName: \"kubernetes.io/projected/99f4689f-66e4-4b45-94e3-55791c3a186b-kube-api-access-rbt5n\") pod \"cert-manager-86cb77c54b-2zlrk\" (UID: \"99f4689f-66e4-4b45-94e3-55791c3a186b\") " pod="cert-manager/cert-manager-86cb77c54b-2zlrk"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.700532 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/99f4689f-66e4-4b45-94e3-55791c3a186b-bound-sa-token\") pod \"cert-manager-86cb77c54b-2zlrk\" (UID: \"99f4689f-66e4-4b45-94e3-55791c3a186b\") " pod="cert-manager/cert-manager-86cb77c54b-2zlrk"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.700944 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbt5n\" (UniqueName: \"kubernetes.io/projected/99f4689f-66e4-4b45-94e3-55791c3a186b-kube-api-access-rbt5n\") pod \"cert-manager-86cb77c54b-2zlrk\" (UID: \"99f4689f-66e4-4b45-94e3-55791c3a186b\") " pod="cert-manager/cert-manager-86cb77c54b-2zlrk"
Dec 01 07:06:30 crc kubenswrapper[4822]: I1201 07:06:30.836344 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-2zlrk"
Dec 01 07:06:31 crc kubenswrapper[4822]: I1201 07:06:31.139038 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-2zlrk"]
Dec 01 07:06:31 crc kubenswrapper[4822]: I1201 07:06:31.750011 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-2zlrk" event={"ID":"99f4689f-66e4-4b45-94e3-55791c3a186b","Type":"ContainerStarted","Data":"3bd9a6a88cc0cfe916e1b57ed696ba56983935b1f04686318cdad372a6635326"}
Dec 01 07:06:31 crc kubenswrapper[4822]: I1201 07:06:31.750533 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-2zlrk" event={"ID":"99f4689f-66e4-4b45-94e3-55791c3a186b","Type":"ContainerStarted","Data":"41f3a8224a7c2a16e50f9d0e17eb6b548b5f3f85c53e25948a6fbacf34ae930d"}
Dec 01 07:06:31 crc kubenswrapper[4822]: I1201 07:06:31.782547 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-2zlrk" podStartSLOduration=1.782514911 podStartE2EDuration="1.782514911s" podCreationTimestamp="2025-12-01 07:06:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:06:31.773102247 +0000 UTC m=+947.093909973" watchObservedRunningTime="2025-12-01 07:06:31.782514911 +0000 UTC m=+947.103322627"
Dec 01 07:06:31 crc kubenswrapper[4822]: I1201 07:06:31.899111 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-ph822"
Dec 01 07:06:34 crc kubenswrapper[4822]: I1201 07:06:34.905189 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-sq89x"]
Dec 01 07:06:34 crc kubenswrapper[4822]: I1201 07:06:34.907821 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-sq89x"
Dec 01 07:06:34 crc kubenswrapper[4822]: I1201 07:06:34.910696 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt"
Dec 01 07:06:34 crc kubenswrapper[4822]: I1201 07:06:34.911047 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt"
Dec 01 07:06:34 crc kubenswrapper[4822]: I1201 07:06:34.918170 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-sq89x"]
Dec 01 07:06:34 crc kubenswrapper[4822]: I1201 07:06:34.943817 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5jq4\" (UniqueName: \"kubernetes.io/projected/85030b09-4363-489f-8299-65c5e08b4571-kube-api-access-w5jq4\") pod \"openstack-operator-index-sq89x\" (UID: \"85030b09-4363-489f-8299-65c5e08b4571\") " pod="openstack-operators/openstack-operator-index-sq89x"
Dec 01 07:06:34 crc kubenswrapper[4822]: I1201 07:06:34.953244 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-t4wkc"
Dec 01 07:06:35 crc kubenswrapper[4822]: I1201 07:06:35.046081 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5jq4\" (UniqueName: \"kubernetes.io/projected/85030b09-4363-489f-8299-65c5e08b4571-kube-api-access-w5jq4\") pod \"openstack-operator-index-sq89x\" (UID: \"85030b09-4363-489f-8299-65c5e08b4571\") " pod="openstack-operators/openstack-operator-index-sq89x"
Dec 01 07:06:35 crc kubenswrapper[4822]: I1201 07:06:35.069450 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5jq4\" (UniqueName: \"kubernetes.io/projected/85030b09-4363-489f-8299-65c5e08b4571-kube-api-access-w5jq4\") pod \"openstack-operator-index-sq89x\" (UID: \"85030b09-4363-489f-8299-65c5e08b4571\") " pod="openstack-operators/openstack-operator-index-sq89x"
Dec 01 07:06:35 crc kubenswrapper[4822]: I1201 07:06:35.272723 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-sq89x"
Dec 01 07:06:35 crc kubenswrapper[4822]: I1201 07:06:35.613816 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-sq89x"]
Dec 01 07:06:35 crc kubenswrapper[4822]: W1201 07:06:35.616249 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85030b09_4363_489f_8299_65c5e08b4571.slice/crio-c24314527faaf215ffb2bb2c9ba586474645f10da3612fa4e7c1ba0ef5ae2fea WatchSource:0}: Error finding container c24314527faaf215ffb2bb2c9ba586474645f10da3612fa4e7c1ba0ef5ae2fea: Status 404 returned error can't find the container with id c24314527faaf215ffb2bb2c9ba586474645f10da3612fa4e7c1ba0ef5ae2fea
Dec 01 07:06:35 crc kubenswrapper[4822]: I1201 07:06:35.787899 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sq89x" event={"ID":"85030b09-4363-489f-8299-65c5e08b4571","Type":"ContainerStarted","Data":"c24314527faaf215ffb2bb2c9ba586474645f10da3612fa4e7c1ba0ef5ae2fea"}
Dec 01 07:06:37 crc kubenswrapper[4822]: I1201 07:06:37.804184 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sq89x" event={"ID":"85030b09-4363-489f-8299-65c5e08b4571","Type":"ContainerStarted","Data":"eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc"}
Dec 01 07:06:37 crc kubenswrapper[4822]: I1201 07:06:37.843457 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-sq89x" podStartSLOduration=2.620135087 podStartE2EDuration="3.843423627s" podCreationTimestamp="2025-12-01 07:06:34 +0000 UTC" firstStartedPulling="2025-12-01 07:06:35.619492759 +0000 UTC m=+950.940300445" lastFinishedPulling="2025-12-01 07:06:36.842781289 +0000 UTC m=+952.163588985" observedRunningTime="2025-12-01 07:06:37.823713975 +0000 UTC m=+953.144521721" watchObservedRunningTime="2025-12-01 07:06:37.843423627 +0000 UTC m=+953.164231313"
Dec 01 07:06:38 crc kubenswrapper[4822]: I1201 07:06:38.273887 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-sq89x"]
Dec 01 07:06:38 crc kubenswrapper[4822]: I1201 07:06:38.596686 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:38 crc kubenswrapper[4822]: I1201 07:06:38.877762 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-lrntz"]
Dec 01 07:06:38 crc kubenswrapper[4822]: I1201 07:06:38.878969 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-lrntz"
Dec 01 07:06:38 crc kubenswrapper[4822]: I1201 07:06:38.904831 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-lrntz"]
Dec 01 07:06:39 crc kubenswrapper[4822]: I1201 07:06:39.011792 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntjxv\" (UniqueName: \"kubernetes.io/projected/50c132e0-fec5-40ca-9f09-8fbe39844bd6-kube-api-access-ntjxv\") pod \"openstack-operator-index-lrntz\" (UID: \"50c132e0-fec5-40ca-9f09-8fbe39844bd6\") " pod="openstack-operators/openstack-operator-index-lrntz"
Dec 01 07:06:39 crc kubenswrapper[4822]: I1201 07:06:39.114025 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntjxv\" (UniqueName: \"kubernetes.io/projected/50c132e0-fec5-40ca-9f09-8fbe39844bd6-kube-api-access-ntjxv\") pod \"openstack-operator-index-lrntz\" (UID: \"50c132e0-fec5-40ca-9f09-8fbe39844bd6\") " pod="openstack-operators/openstack-operator-index-lrntz"
Dec 01 07:06:39 crc kubenswrapper[4822]: I1201 07:06:39.153803 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntjxv\" (UniqueName: \"kubernetes.io/projected/50c132e0-fec5-40ca-9f09-8fbe39844bd6-kube-api-access-ntjxv\") pod \"openstack-operator-index-lrntz\" (UID: \"50c132e0-fec5-40ca-9f09-8fbe39844bd6\") " pod="openstack-operators/openstack-operator-index-lrntz"
Dec 01 07:06:39 crc kubenswrapper[4822]: I1201 07:06:39.206746 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-lrntz"
Dec 01 07:06:39 crc kubenswrapper[4822]: I1201 07:06:39.817878 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-sq89x" podUID="85030b09-4363-489f-8299-65c5e08b4571" containerName="registry-server" containerID="cri-o://eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc" gracePeriod=2
Dec 01 07:06:41 crc kubenswrapper[4822]: I1201 07:06:41.211350 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-lrntz"]
Dec 01 07:06:41 crc kubenswrapper[4822]: W1201 07:06:41.240094 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50c132e0_fec5_40ca_9f09_8fbe39844bd6.slice/crio-96112b1501085cf777c9d315c7090764e46568b7c153411404638ecaff45924d WatchSource:0}: Error finding container 96112b1501085cf777c9d315c7090764e46568b7c153411404638ecaff45924d: Status 404 returned error can't find the container with id 96112b1501085cf777c9d315c7090764e46568b7c153411404638ecaff45924d
Dec 01 07:06:41 crc kubenswrapper[4822]: I1201 07:06:41.989949 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-sq89x"
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.170977 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5jq4\" (UniqueName: \"kubernetes.io/projected/85030b09-4363-489f-8299-65c5e08b4571-kube-api-access-w5jq4\") pod \"85030b09-4363-489f-8299-65c5e08b4571\" (UID: \"85030b09-4363-489f-8299-65c5e08b4571\") "
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.178652 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85030b09-4363-489f-8299-65c5e08b4571-kube-api-access-w5jq4" (OuterVolumeSpecName: "kube-api-access-w5jq4") pod "85030b09-4363-489f-8299-65c5e08b4571" (UID: "85030b09-4363-489f-8299-65c5e08b4571"). InnerVolumeSpecName "kube-api-access-w5jq4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.197128 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-lrntz" event={"ID":"50c132e0-fec5-40ca-9f09-8fbe39844bd6","Type":"ContainerStarted","Data":"438b474582fee0db47609c1a61eb24791f99960d84b88ac9cb3fbea6748f7c62"}
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.197176 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-lrntz" event={"ID":"50c132e0-fec5-40ca-9f09-8fbe39844bd6","Type":"ContainerStarted","Data":"96112b1501085cf777c9d315c7090764e46568b7c153411404638ecaff45924d"}
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.199625 4822 generic.go:334] "Generic (PLEG): container finished" podID="85030b09-4363-489f-8299-65c5e08b4571" containerID="eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc" exitCode=0
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.199699 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sq89x" event={"ID":"85030b09-4363-489f-8299-65c5e08b4571","Type":"ContainerDied","Data":"eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc"}
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.199755 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sq89x" event={"ID":"85030b09-4363-489f-8299-65c5e08b4571","Type":"ContainerDied","Data":"c24314527faaf215ffb2bb2c9ba586474645f10da3612fa4e7c1ba0ef5ae2fea"}
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.199781 4822 scope.go:117] "RemoveContainer" containerID="eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc"
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.200001 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-sq89x"
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.236372 4822 scope.go:117] "RemoveContainer" containerID="eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc"
Dec 01 07:06:42 crc kubenswrapper[4822]: E1201 07:06:42.237917 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc\": container with ID starting with eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc not found: ID does not exist" containerID="eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc"
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.237964 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc"} err="failed to get container status \"eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc\": rpc error: code = NotFound desc = could not find container \"eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc\": container with ID starting with eb7c5193347a332e536ce1e22127a42e4905c7051ca163cfe8c2a4d903aa5efc not found: ID does not exist"
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.254357 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-lrntz" podStartSLOduration=3.780836581 podStartE2EDuration="4.254305947s" podCreationTimestamp="2025-12-01 07:06:38 +0000 UTC" firstStartedPulling="2025-12-01 07:06:41.249635876 +0000 UTC m=+956.570443562" lastFinishedPulling="2025-12-01 07:06:41.723105232 +0000 UTC m=+957.043912928" observedRunningTime="2025-12-01 07:06:42.219320286 +0000 UTC m=+957.540128012" watchObservedRunningTime="2025-12-01 07:06:42.254305947 +0000 UTC m=+957.575113633"
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.259330 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-sq89x"]
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.263878 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-sq89x"]
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.272422 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5jq4\" (UniqueName: \"kubernetes.io/projected/85030b09-4363-489f-8299-65c5e08b4571-kube-api-access-w5jq4\") on node \"crc\" DevicePath \"\""
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.542592 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.542666 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.542736 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64"
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.543769 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6baf3d5e41a621d0e59cbb384ffe06f0de93d5916d8dc51ecf89a3a235ed2c54"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.543878 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://6baf3d5e41a621d0e59cbb384ffe06f0de93d5916d8dc51ecf89a3a235ed2c54" gracePeriod=600
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.864643 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r5nnn"]
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.865621 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-r5nnn" podUID="81d57209-c2f8-48a0-9ce9-b45972e743f8" containerName="registry-server" containerID="cri-o://220f987cbf462aa36f867b793c48e801b091c2b27c7c629ea36d9ecd083354d5" gracePeriod=2
Dec 01 07:06:42 crc kubenswrapper[4822]: I1201 07:06:42.959371 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85030b09-4363-489f-8299-65c5e08b4571" path="/var/lib/kubelet/pods/85030b09-4363-489f-8299-65c5e08b4571/volumes"
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.210948 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="6baf3d5e41a621d0e59cbb384ffe06f0de93d5916d8dc51ecf89a3a235ed2c54" exitCode=0
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.211037 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"6baf3d5e41a621d0e59cbb384ffe06f0de93d5916d8dc51ecf89a3a235ed2c54"}
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.211403 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"9f465aacecd8042506a98ec45b2a943f80875679afbe9996cf6b8ba8198e3f7e"}
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.211427 4822 scope.go:117] "RemoveContainer" containerID="338b930b0a920daac3d47f96c28535256b673fb917325f97766ffd05e922ff93"
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.214892 4822 generic.go:334] "Generic (PLEG): container finished" podID="81d57209-c2f8-48a0-9ce9-b45972e743f8" containerID="220f987cbf462aa36f867b793c48e801b091c2b27c7c629ea36d9ecd083354d5" exitCode=0
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.214980 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r5nnn" event={"ID":"81d57209-c2f8-48a0-9ce9-b45972e743f8","Type":"ContainerDied","Data":"220f987cbf462aa36f867b793c48e801b091c2b27c7c629ea36d9ecd083354d5"}
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.278212 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.287811 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81d57209-c2f8-48a0-9ce9-b45972e743f8-catalog-content\") pod \"81d57209-c2f8-48a0-9ce9-b45972e743f8\" (UID: \"81d57209-c2f8-48a0-9ce9-b45972e743f8\") "
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.287932 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fv5cj\" (UniqueName: \"kubernetes.io/projected/81d57209-c2f8-48a0-9ce9-b45972e743f8-kube-api-access-fv5cj\") pod \"81d57209-c2f8-48a0-9ce9-b45972e743f8\" (UID: \"81d57209-c2f8-48a0-9ce9-b45972e743f8\") "
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.287978 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81d57209-c2f8-48a0-9ce9-b45972e743f8-utilities\") pod \"81d57209-c2f8-48a0-9ce9-b45972e743f8\" (UID: \"81d57209-c2f8-48a0-9ce9-b45972e743f8\") "
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.288855 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81d57209-c2f8-48a0-9ce9-b45972e743f8-utilities" (OuterVolumeSpecName: "utilities") pod "81d57209-c2f8-48a0-9ce9-b45972e743f8" (UID: "81d57209-c2f8-48a0-9ce9-b45972e743f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.297666 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81d57209-c2f8-48a0-9ce9-b45972e743f8-kube-api-access-fv5cj" (OuterVolumeSpecName: "kube-api-access-fv5cj") pod "81d57209-c2f8-48a0-9ce9-b45972e743f8" (UID: "81d57209-c2f8-48a0-9ce9-b45972e743f8"). InnerVolumeSpecName "kube-api-access-fv5cj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.338083 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81d57209-c2f8-48a0-9ce9-b45972e743f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81d57209-c2f8-48a0-9ce9-b45972e743f8" (UID: "81d57209-c2f8-48a0-9ce9-b45972e743f8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.389263 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fv5cj\" (UniqueName: \"kubernetes.io/projected/81d57209-c2f8-48a0-9ce9-b45972e743f8-kube-api-access-fv5cj\") on node \"crc\" DevicePath \"\""
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.389297 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81d57209-c2f8-48a0-9ce9-b45972e743f8-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 07:06:43 crc kubenswrapper[4822]: I1201 07:06:43.389311 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81d57209-c2f8-48a0-9ce9-b45972e743f8-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 07:06:44 crc kubenswrapper[4822]: I1201 07:06:44.229412 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r5nnn" event={"ID":"81d57209-c2f8-48a0-9ce9-b45972e743f8","Type":"ContainerDied","Data":"8c2c538c83d8fe6e1580511ffc80200c66c4285fd05c3684ab0b782280bf8ebf"}
Dec 01 07:06:44 crc kubenswrapper[4822]: I1201 07:06:44.229487 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r5nnn"
Dec 01 07:06:44 crc kubenswrapper[4822]: I1201 07:06:44.229950 4822 scope.go:117] "RemoveContainer" containerID="220f987cbf462aa36f867b793c48e801b091c2b27c7c629ea36d9ecd083354d5"
Dec 01 07:06:44 crc kubenswrapper[4822]: I1201 07:06:44.270767 4822 scope.go:117] "RemoveContainer" containerID="8649b7b5e950f72bf47f69d76b227e896d0b014fda18e7ad7f80ce13c14b0be2"
Dec 01 07:06:44 crc kubenswrapper[4822]: I1201 07:06:44.282588 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r5nnn"]
Dec 01 07:06:44 crc kubenswrapper[4822]: I1201 07:06:44.289150 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-r5nnn"]
Dec 01 07:06:44 crc kubenswrapper[4822]: I1201 07:06:44.293457 4822 scope.go:117] "RemoveContainer" containerID="000068f91c420e257668651accd7a1edaf5f3acf364b91b8c2c2a52f783f6345"
Dec 01 07:06:44 crc kubenswrapper[4822]: I1201 07:06:44.981114 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81d57209-c2f8-48a0-9ce9-b45972e743f8" path="/var/lib/kubelet/pods/81d57209-c2f8-48a0-9ce9-b45972e743f8/volumes"
Dec 01 07:06:49 crc kubenswrapper[4822]: I1201 07:06:49.207364 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-lrntz"
Dec 01 07:06:49 crc kubenswrapper[4822]: I1201 07:06:49.208360 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-lrntz"
Dec 01 07:06:49 crc kubenswrapper[4822]: I1201 07:06:49.249289 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-lrntz"
Dec 01 07:06:49 crc kubenswrapper[4822]: I1201 07:06:49.321618 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-lrntz"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.140180 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"]
Dec 01 07:06:51 crc kubenswrapper[4822]: E1201 07:06:51.140692 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81d57209-c2f8-48a0-9ce9-b45972e743f8" containerName="extract-content"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.140721 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="81d57209-c2f8-48a0-9ce9-b45972e743f8" containerName="extract-content"
Dec 01 07:06:51 crc kubenswrapper[4822]: E1201 07:06:51.140771 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81d57209-c2f8-48a0-9ce9-b45972e743f8" containerName="registry-server"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.140789 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="81d57209-c2f8-48a0-9ce9-b45972e743f8" containerName="registry-server"
Dec 01 07:06:51 crc kubenswrapper[4822]: E1201 07:06:51.140822 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81d57209-c2f8-48a0-9ce9-b45972e743f8" containerName="extract-utilities"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.140839 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="81d57209-c2f8-48a0-9ce9-b45972e743f8" containerName="extract-utilities"
Dec 01 07:06:51 crc kubenswrapper[4822]: E1201 07:06:51.140871 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85030b09-4363-489f-8299-65c5e08b4571" containerName="registry-server"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.140888 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="85030b09-4363-489f-8299-65c5e08b4571" containerName="registry-server"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.141165 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="85030b09-4363-489f-8299-65c5e08b4571" containerName="registry-server"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.141207 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="81d57209-c2f8-48a0-9ce9-b45972e743f8" containerName="registry-server"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.143299 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.147020 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-t5bmh"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.158529 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"]
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.215772 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d736d36e-1452-4556-ab66-9ef1a6c14828-util\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8\" (UID: \"d736d36e-1452-4556-ab66-9ef1a6c14828\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.216210 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldjp5\" (UniqueName: \"kubernetes.io/projected/d736d36e-1452-4556-ab66-9ef1a6c14828-kube-api-access-ldjp5\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8\" (UID: \"d736d36e-1452-4556-ab66-9ef1a6c14828\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.216378 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d736d36e-1452-4556-ab66-9ef1a6c14828-bundle\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8\" (UID: \"d736d36e-1452-4556-ab66-9ef1a6c14828\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.317817 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d736d36e-1452-4556-ab66-9ef1a6c14828-bundle\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8\" (UID: \"d736d36e-1452-4556-ab66-9ef1a6c14828\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.317883 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d736d36e-1452-4556-ab66-9ef1a6c14828-util\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8\" (UID: \"d736d36e-1452-4556-ab66-9ef1a6c14828\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.317936 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldjp5\" (UniqueName: \"kubernetes.io/projected/d736d36e-1452-4556-ab66-9ef1a6c14828-kube-api-access-ldjp5\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8\" (UID: \"d736d36e-1452-4556-ab66-9ef1a6c14828\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.318545 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d736d36e-1452-4556-ab66-9ef1a6c14828-bundle\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8\" (UID: \"d736d36e-1452-4556-ab66-9ef1a6c14828\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.318797 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d736d36e-1452-4556-ab66-9ef1a6c14828-util\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8\" (UID: \"d736d36e-1452-4556-ab66-9ef1a6c14828\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.354286 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldjp5\" (UniqueName: \"kubernetes.io/projected/d736d36e-1452-4556-ab66-9ef1a6c14828-kube-api-access-ldjp5\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8\" (UID: \"d736d36e-1452-4556-ab66-9ef1a6c14828\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.472451 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:51 crc kubenswrapper[4822]: I1201 07:06:51.814025 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"]
Dec 01 07:06:52 crc kubenswrapper[4822]: I1201 07:06:52.313020 4822 generic.go:334] "Generic (PLEG): container finished" podID="d736d36e-1452-4556-ab66-9ef1a6c14828" containerID="2bf053ff6f7fa31baa68ed6a934d9862a58719929a5612f57b199d4ad5b2c9f3" exitCode=0
Dec 01 07:06:52 crc kubenswrapper[4822]: I1201 07:06:52.313168 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8" event={"ID":"d736d36e-1452-4556-ab66-9ef1a6c14828","Type":"ContainerDied","Data":"2bf053ff6f7fa31baa68ed6a934d9862a58719929a5612f57b199d4ad5b2c9f3"}
Dec 01 07:06:52 crc kubenswrapper[4822]: I1201 07:06:52.313781 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8" event={"ID":"d736d36e-1452-4556-ab66-9ef1a6c14828","Type":"ContainerStarted","Data":"cbac86a64a160bba33b25ac103f0c4499228fe6cd8c742106a841a999d634ada"}
Dec 01 07:06:53 crc kubenswrapper[4822]: I1201 07:06:53.322413 4822 generic.go:334] "Generic (PLEG): container finished" podID="d736d36e-1452-4556-ab66-9ef1a6c14828" containerID="510cdd4b2f959ca0fa6e17585b36aea20b359760680ec0e37e9b959c2284ccde" exitCode=0
Dec 01 07:06:53 crc kubenswrapper[4822]: I1201 07:06:53.322515 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8" event={"ID":"d736d36e-1452-4556-ab66-9ef1a6c14828","Type":"ContainerDied","Data":"510cdd4b2f959ca0fa6e17585b36aea20b359760680ec0e37e9b959c2284ccde"}
Dec 01 07:06:54 crc kubenswrapper[4822]: I1201 07:06:54.331338 4822 generic.go:334] "Generic (PLEG): container finished" podID="d736d36e-1452-4556-ab66-9ef1a6c14828" containerID="fa91fb37fbfb351b7fa316b9ef9ba99534e7edf2ca7795a92f8e050a1e772f00" exitCode=0
Dec 01 07:06:54 crc kubenswrapper[4822]: I1201 07:06:54.331534 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8" event={"ID":"d736d36e-1452-4556-ab66-9ef1a6c14828","Type":"ContainerDied","Data":"fa91fb37fbfb351b7fa316b9ef9ba99534e7edf2ca7795a92f8e050a1e772f00"}
Dec 01 07:06:55 crc kubenswrapper[4822]: I1201 07:06:55.758232 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:55 crc kubenswrapper[4822]: I1201 07:06:55.785166 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldjp5\" (UniqueName: \"kubernetes.io/projected/d736d36e-1452-4556-ab66-9ef1a6c14828-kube-api-access-ldjp5\") pod \"d736d36e-1452-4556-ab66-9ef1a6c14828\" (UID: \"d736d36e-1452-4556-ab66-9ef1a6c14828\") "
Dec 01 07:06:55 crc kubenswrapper[4822]: I1201 07:06:55.785746 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d736d36e-1452-4556-ab66-9ef1a6c14828-util\") pod \"d736d36e-1452-4556-ab66-9ef1a6c14828\" (UID: \"d736d36e-1452-4556-ab66-9ef1a6c14828\") "
Dec 01 07:06:55 crc kubenswrapper[4822]: I1201 07:06:55.785793 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d736d36e-1452-4556-ab66-9ef1a6c14828-bundle\") pod \"d736d36e-1452-4556-ab66-9ef1a6c14828\" (UID: \"d736d36e-1452-4556-ab66-9ef1a6c14828\") "
Dec 01 07:06:55 crc kubenswrapper[4822]: I1201 07:06:55.786426 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d736d36e-1452-4556-ab66-9ef1a6c14828-bundle" (OuterVolumeSpecName: "bundle") pod "d736d36e-1452-4556-ab66-9ef1a6c14828" (UID: "d736d36e-1452-4556-ab66-9ef1a6c14828"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:06:55 crc kubenswrapper[4822]: I1201 07:06:55.795374 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d736d36e-1452-4556-ab66-9ef1a6c14828-kube-api-access-ldjp5" (OuterVolumeSpecName: "kube-api-access-ldjp5") pod "d736d36e-1452-4556-ab66-9ef1a6c14828" (UID: "d736d36e-1452-4556-ab66-9ef1a6c14828"). InnerVolumeSpecName "kube-api-access-ldjp5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:06:55 crc kubenswrapper[4822]: I1201 07:06:55.800856 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d736d36e-1452-4556-ab66-9ef1a6c14828-util" (OuterVolumeSpecName: "util") pod "d736d36e-1452-4556-ab66-9ef1a6c14828" (UID: "d736d36e-1452-4556-ab66-9ef1a6c14828"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:06:55 crc kubenswrapper[4822]: I1201 07:06:55.887215 4822 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d736d36e-1452-4556-ab66-9ef1a6c14828-util\") on node \"crc\" DevicePath \"\""
Dec 01 07:06:55 crc kubenswrapper[4822]: I1201 07:06:55.887269 4822 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d736d36e-1452-4556-ab66-9ef1a6c14828-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:06:55 crc kubenswrapper[4822]: I1201 07:06:55.887290 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldjp5\" (UniqueName: \"kubernetes.io/projected/d736d36e-1452-4556-ab66-9ef1a6c14828-kube-api-access-ldjp5\") on node \"crc\" DevicePath \"\""
Dec 01 07:06:56 crc kubenswrapper[4822]: I1201 07:06:56.354650 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8" event={"ID":"d736d36e-1452-4556-ab66-9ef1a6c14828","Type":"ContainerDied","Data":"cbac86a64a160bba33b25ac103f0c4499228fe6cd8c742106a841a999d634ada"}
Dec 01 07:06:56 crc kubenswrapper[4822]: I1201 07:06:56.354743 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbac86a64a160bba33b25ac103f0c4499228fe6cd8c742106a841a999d634ada"
Dec 01 07:06:56 crc kubenswrapper[4822]: I1201 07:06:56.354927 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8"
Dec 01 07:06:58 crc kubenswrapper[4822]: I1201 07:06:58.381137 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2"]
Dec 01 07:06:58 crc kubenswrapper[4822]: E1201 07:06:58.381783 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d736d36e-1452-4556-ab66-9ef1a6c14828" containerName="util"
Dec 01 07:06:58 crc kubenswrapper[4822]: I1201 07:06:58.381803 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d736d36e-1452-4556-ab66-9ef1a6c14828" containerName="util"
Dec 01 07:06:58 crc kubenswrapper[4822]: E1201 07:06:58.381819 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d736d36e-1452-4556-ab66-9ef1a6c14828" containerName="pull"
Dec 01 07:06:58 crc kubenswrapper[4822]: I1201 07:06:58.381828 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d736d36e-1452-4556-ab66-9ef1a6c14828" containerName="pull"
Dec 01 07:06:58 crc kubenswrapper[4822]: E1201 07:06:58.381849 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d736d36e-1452-4556-ab66-9ef1a6c14828" containerName="extract"
Dec 01 07:06:58 crc kubenswrapper[4822]: I1201 07:06:58.381858 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d736d36e-1452-4556-ab66-9ef1a6c14828" containerName="extract"
Dec 01 07:06:58 crc kubenswrapper[4822]: I1201 07:06:58.381994 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="d736d36e-1452-4556-ab66-9ef1a6c14828" containerName="extract"
Dec 01 07:06:58 crc kubenswrapper[4822]: I1201 07:06:58.382409 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2"
Dec 01 07:06:58 crc kubenswrapper[4822]: I1201 07:06:58.384688 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-qc9qp"
Dec 01 07:06:58 crc kubenswrapper[4822]: I1201 07:06:58.410875 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2"]
Dec 01 07:06:58 crc kubenswrapper[4822]: I1201 07:06:58.523194 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7459w\" (UniqueName: \"kubernetes.io/projected/d756e5cf-2fd3-4c62-8b35-a9e65a3b1073-kube-api-access-7459w\") pod \"openstack-operator-controller-operator-6ddddd9d6f-5gtb2\" (UID: \"d756e5cf-2fd3-4c62-8b35-a9e65a3b1073\") " pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2"
Dec 01 07:06:58 crc kubenswrapper[4822]: I1201 07:06:58.624180 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7459w\" (UniqueName: \"kubernetes.io/projected/d756e5cf-2fd3-4c62-8b35-a9e65a3b1073-kube-api-access-7459w\") pod \"openstack-operator-controller-operator-6ddddd9d6f-5gtb2\" (UID: \"d756e5cf-2fd3-4c62-8b35-a9e65a3b1073\") " pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2"
Dec 01 07:06:58 crc kubenswrapper[4822]: I1201 07:06:58.645385 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7459w\" (UniqueName: \"kubernetes.io/projected/d756e5cf-2fd3-4c62-8b35-a9e65a3b1073-kube-api-access-7459w\") pod \"openstack-operator-controller-operator-6ddddd9d6f-5gtb2\" (UID: \"d756e5cf-2fd3-4c62-8b35-a9e65a3b1073\") " pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2"
Dec 01 07:06:58 crc kubenswrapper[4822]: I1201 07:06:58.697674 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2"
Dec 01 07:06:59 crc kubenswrapper[4822]: I1201 07:06:59.156932 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2"]
Dec 01 07:06:59 crc kubenswrapper[4822]: W1201 07:06:59.164146 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd756e5cf_2fd3_4c62_8b35_a9e65a3b1073.slice/crio-b79bb639585074627f58475578271c3b1cf23474da045d08e12479a9d353ec68 WatchSource:0}: Error finding container b79bb639585074627f58475578271c3b1cf23474da045d08e12479a9d353ec68: Status 404 returned error can't find the container with id b79bb639585074627f58475578271c3b1cf23474da045d08e12479a9d353ec68
Dec 01 07:06:59 crc kubenswrapper[4822]: I1201 07:06:59.379386 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2" event={"ID":"d756e5cf-2fd3-4c62-8b35-a9e65a3b1073","Type":"ContainerStarted","Data":"b79bb639585074627f58475578271c3b1cf23474da045d08e12479a9d353ec68"}
Dec 01 07:07:05 crc kubenswrapper[4822]: I1201 07:07:05.420902 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2" event={"ID":"d756e5cf-2fd3-4c62-8b35-a9e65a3b1073","Type":"ContainerStarted","Data":"e5ccc1ff6c5f2d3169939279c92b5b86311d1feb66a34f0f95c63b3226dc400d"}
Dec 01 07:07:05 crc kubenswrapper[4822]: I1201 07:07:05.421677 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2"
Dec 01 07:07:05 crc kubenswrapper[4822]: I1201 07:07:05.488530 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2" podStartSLOduration=2.34450942 podStartE2EDuration="7.488506927s" podCreationTimestamp="2025-12-01 07:06:58 +0000 UTC" firstStartedPulling="2025-12-01 07:06:59.165418981 +0000 UTC m=+974.486226667" lastFinishedPulling="2025-12-01 07:07:04.309416488 +0000 UTC m=+979.630224174" observedRunningTime="2025-12-01 07:07:05.483161547 +0000 UTC m=+980.803969273" watchObservedRunningTime="2025-12-01 07:07:05.488506927 +0000 UTC m=+980.809314653"
Dec 01 07:07:18 crc kubenswrapper[4822]: I1201 07:07:18.718607 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-5gtb2"
Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.336857 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v"]
Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.340145 4822 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.341894 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-9bmrw" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.347127 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.371524 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.373106 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.383205 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-xdgjh" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.389652 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.391591 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.394592 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.396508 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.397080 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-h57tm" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.399874 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-6w7wb" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.400455 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.440167 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.458010 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.463325 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.464561 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.468372 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlw45\" (UniqueName: \"kubernetes.io/projected/fd0abaec-7f45-438f-843e-1a1dd2cbf841-kube-api-access-wlw45\") pod \"barbican-operator-controller-manager-7d9dfd778-f4bff\" (UID: \"fd0abaec-7f45-438f-843e-1a1dd2cbf841\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.468439 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fhmx\" (UniqueName: \"kubernetes.io/projected/43d47ccc-afef-42e7-bc18-df3be5e2b4e0-kube-api-access-8fhmx\") pod \"cinder-operator-controller-manager-859b6ccc6-pbb9v\" (UID: \"43d47ccc-afef-42e7-bc18-df3be5e2b4e0\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.468483 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vl99m\" (UniqueName: \"kubernetes.io/projected/adf58c74-4460-490a-97bb-a2d60a6efffa-kube-api-access-vl99m\") pod \"designate-operator-controller-manager-78b4bc895b-4xwss\" (UID: \"adf58c74-4460-490a-97bb-a2d60a6efffa\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.468542 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsf78\" (UniqueName: \"kubernetes.io/projected/031f17da-46a5-4904-b32c-968dbd5959c1-kube-api-access-tsf78\") pod \"glance-operator-controller-manager-668d9c48b9-fkvck\" (UID: \"031f17da-46a5-4904-b32c-968dbd5959c1\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.468797 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-s8d4q" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.486344 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.487531 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.497026 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-dc8wf" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.500562 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.520851 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.528858 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.533170 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.535750 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-9cn6w" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.564976 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.569411 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlw45\" (UniqueName: \"kubernetes.io/projected/fd0abaec-7f45-438f-843e-1a1dd2cbf841-kube-api-access-wlw45\") pod \"barbican-operator-controller-manager-7d9dfd778-f4bff\" (UID: \"fd0abaec-7f45-438f-843e-1a1dd2cbf841\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.569466 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert\") pod \"infra-operator-controller-manager-57548d458d-2s7q5\" (UID: \"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.569497 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fhmx\" (UniqueName: \"kubernetes.io/projected/43d47ccc-afef-42e7-bc18-df3be5e2b4e0-kube-api-access-8fhmx\") pod \"cinder-operator-controller-manager-859b6ccc6-pbb9v\" (UID: \"43d47ccc-afef-42e7-bc18-df3be5e2b4e0\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.569520 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vl99m\" (UniqueName: \"kubernetes.io/projected/adf58c74-4460-490a-97bb-a2d60a6efffa-kube-api-access-vl99m\") pod \"designate-operator-controller-manager-78b4bc895b-4xwss\" (UID: \"adf58c74-4460-490a-97bb-a2d60a6efffa\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.569572 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvxq5\" (UniqueName: \"kubernetes.io/projected/85f27b60-f694-4768-b55e-bb816ed4594b-kube-api-access-rvxq5\") pod \"heat-operator-controller-manager-5f64f6f8bb-275w4\" (UID: \"85f27b60-f694-4768-b55e-bb816ed4594b\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.569596 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx6lh\" (UniqueName: \"kubernetes.io/projected/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-kube-api-access-qx6lh\") pod \"infra-operator-controller-manager-57548d458d-2s7q5\" (UID: \"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.569647 4822 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsf78\" (UniqueName: \"kubernetes.io/projected/031f17da-46a5-4904-b32c-968dbd5959c1-kube-api-access-tsf78\") pod \"glance-operator-controller-manager-668d9c48b9-fkvck\" (UID: \"031f17da-46a5-4904-b32c-968dbd5959c1\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.569674 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvwt7\" (UniqueName: \"kubernetes.io/projected/b6e7914c-28b1-4241-9db9-ebecda9ede7a-kube-api-access-zvwt7\") pod \"horizon-operator-controller-manager-68c6d99b8f-t2t9n\" (UID: \"b6e7914c-28b1-4241-9db9-ebecda9ede7a\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.575429 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.593000 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.595656 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.598242 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-vkn2t" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.599443 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlw45\" (UniqueName: \"kubernetes.io/projected/fd0abaec-7f45-438f-843e-1a1dd2cbf841-kube-api-access-wlw45\") pod \"barbican-operator-controller-manager-7d9dfd778-f4bff\" (UID: \"fd0abaec-7f45-438f-843e-1a1dd2cbf841\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.599479 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vl99m\" (UniqueName: \"kubernetes.io/projected/adf58c74-4460-490a-97bb-a2d60a6efffa-kube-api-access-vl99m\") pod \"designate-operator-controller-manager-78b4bc895b-4xwss\" (UID: \"adf58c74-4460-490a-97bb-a2d60a6efffa\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.599882 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fhmx\" (UniqueName: \"kubernetes.io/projected/43d47ccc-afef-42e7-bc18-df3be5e2b4e0-kube-api-access-8fhmx\") pod \"cinder-operator-controller-manager-859b6ccc6-pbb9v\" (UID: \"43d47ccc-afef-42e7-bc18-df3be5e2b4e0\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.600588 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsf78\" (UniqueName: \"kubernetes.io/projected/031f17da-46a5-4904-b32c-968dbd5959c1-kube-api-access-tsf78\") pod \"glance-operator-controller-manager-668d9c48b9-fkvck\" (UID: \"031f17da-46a5-4904-b32c-968dbd5959c1\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 
07:07:37.618075 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.634612 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.635743 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.639689 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-bq2t9" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.646405 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.653738 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.655460 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.658999 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-8xvjv" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.660643 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.671675 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.672709 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.673261 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvwt7\" (UniqueName: \"kubernetes.io/projected/b6e7914c-28b1-4241-9db9-ebecda9ede7a-kube-api-access-zvwt7\") pod \"horizon-operator-controller-manager-68c6d99b8f-t2t9n\" (UID: \"b6e7914c-28b1-4241-9db9-ebecda9ede7a\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.673385 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert\") pod \"infra-operator-controller-manager-57548d458d-2s7q5\" (UID: \"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.673625 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qbsp\" (UniqueName: \"kubernetes.io/projected/ea009d0a-d9c2-4265-96bf-6153ce222eef-kube-api-access-5qbsp\") pod \"keystone-operator-controller-manager-546d4bdf48-m8rnk\" (UID: \"ea009d0a-d9c2-4265-96bf-6153ce222eef\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.673744 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh62p\" (UniqueName: \"kubernetes.io/projected/ee5199ce-b0f1-4753-a317-8d4b95bca11b-kube-api-access-zh62p\") pod \"ironic-operator-controller-manager-6c548fd776-k72b6\" (UID: \"ee5199ce-b0f1-4753-a317-8d4b95bca11b\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.673823 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvxq5\" (UniqueName: \"kubernetes.io/projected/85f27b60-f694-4768-b55e-bb816ed4594b-kube-api-access-rvxq5\") pod \"heat-operator-controller-manager-5f64f6f8bb-275w4\" (UID: \"85f27b60-f694-4768-b55e-bb816ed4594b\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.673918 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx6lh\" (UniqueName: \"kubernetes.io/projected/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-kube-api-access-qx6lh\") pod \"infra-operator-controller-manager-57548d458d-2s7q5\" (UID: \"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" Dec 01 07:07:37 crc kubenswrapper[4822]: E1201 07:07:37.674451 4822 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 07:07:37 crc kubenswrapper[4822]: E1201 07:07:37.674589 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert podName:9f7f1540-19b0-48c0-adab-c10e8bdd0fd3 nodeName:}" failed. No retries permitted until 2025-12-01 07:07:38.17457067 +0000 UTC m=+1013.495378356 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert") pod "infra-operator-controller-manager-57548d458d-2s7q5" (UID: "9f7f1540-19b0-48c0-adab-c10e8bdd0fd3") : secret "infra-operator-webhook-server-cert" not found Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.676901 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-2d458" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.686930 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.689524 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.690632 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.691974 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-rpgnt" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.714618 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.719302 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx6lh\" (UniqueName: \"kubernetes.io/projected/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-kube-api-access-qx6lh\") pod \"infra-operator-controller-manager-57548d458d-2s7q5\" (UID: \"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.725741 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvwt7\" (UniqueName: \"kubernetes.io/projected/b6e7914c-28b1-4241-9db9-ebecda9ede7a-kube-api-access-zvwt7\") pod \"horizon-operator-controller-manager-68c6d99b8f-t2t9n\" (UID: \"b6e7914c-28b1-4241-9db9-ebecda9ede7a\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.734275 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvxq5\" (UniqueName: \"kubernetes.io/projected/85f27b60-f694-4768-b55e-bb816ed4594b-kube-api-access-rvxq5\") pod \"heat-operator-controller-manager-5f64f6f8bb-275w4\" (UID: \"85f27b60-f694-4768-b55e-bb816ed4594b\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.739942 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.741069 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.747666 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.756812 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.773530 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.783416 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.789006 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-lj2xq" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.812706 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh62p\" (UniqueName: \"kubernetes.io/projected/ee5199ce-b0f1-4753-a317-8d4b95bca11b-kube-api-access-zh62p\") pod \"ironic-operator-controller-manager-6c548fd776-k72b6\" (UID: \"ee5199ce-b0f1-4753-a317-8d4b95bca11b\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.813383 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llnvj\" (UniqueName: \"kubernetes.io/projected/7fd09048-e506-48e1-9d30-01cbd6117fcc-kube-api-access-llnvj\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-mbsh5\" (UID: \"7fd09048-e506-48e1-9d30-01cbd6117fcc\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.813721 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcxrm\" (UniqueName: \"kubernetes.io/projected/2ed3d718-e591-45b9-9ae7-f6ed765afa35-kube-api-access-lcxrm\") pod \"mariadb-operator-controller-manager-56bbcc9d85-2vpl9\" (UID: \"2ed3d718-e591-45b9-9ae7-f6ed765afa35\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.814181 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qbsp\" (UniqueName: \"kubernetes.io/projected/ea009d0a-d9c2-4265-96bf-6153ce222eef-kube-api-access-5qbsp\") pod \"keystone-operator-controller-manager-546d4bdf48-m8rnk\" (UID: \"ea009d0a-d9c2-4265-96bf-6153ce222eef\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.814825 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swmzk\" (UniqueName: \"kubernetes.io/projected/af698804-e3b6-481d-bd2f-34350bae1b8f-kube-api-access-swmzk\") pod 
\"manila-operator-controller-manager-6546668bfd-vlj27\" (UID: \"af698804-e3b6-481d-bd2f-34350bae1b8f\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.835109 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.836102 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.854599 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qbsp\" (UniqueName: \"kubernetes.io/projected/ea009d0a-d9c2-4265-96bf-6153ce222eef-kube-api-access-5qbsp\") pod \"keystone-operator-controller-manager-546d4bdf48-m8rnk\" (UID: \"ea009d0a-d9c2-4265-96bf-6153ce222eef\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.876422 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh62p\" (UniqueName: \"kubernetes.io/projected/ee5199ce-b0f1-4753-a317-8d4b95bca11b-kube-api-access-zh62p\") pod \"ironic-operator-controller-manager-6c548fd776-k72b6\" (UID: \"ee5199ce-b0f1-4753-a317-8d4b95bca11b\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.921480 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.925668 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcxrm\" (UniqueName: \"kubernetes.io/projected/2ed3d718-e591-45b9-9ae7-f6ed765afa35-kube-api-access-lcxrm\") pod \"mariadb-operator-controller-manager-56bbcc9d85-2vpl9\" (UID: \"2ed3d718-e591-45b9-9ae7-f6ed765afa35\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.925705 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gj2ph\" (UniqueName: \"kubernetes.io/projected/4a0f6236-18f0-436a-9544-d76b8d1c3a09-kube-api-access-gj2ph\") pod \"nova-operator-controller-manager-697bc559fc-49rk7\" (UID: \"4a0f6236-18f0-436a-9544-d76b8d1c3a09\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.925800 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swmzk\" (UniqueName: \"kubernetes.io/projected/af698804-e3b6-481d-bd2f-34350bae1b8f-kube-api-access-swmzk\") pod \"manila-operator-controller-manager-6546668bfd-vlj27\" (UID: \"af698804-e3b6-481d-bd2f-34350bae1b8f\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.925829 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llnvj\" (UniqueName: \"kubernetes.io/projected/7fd09048-e506-48e1-9d30-01cbd6117fcc-kube-api-access-llnvj\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-mbsh5\" (UID: \"7fd09048-e506-48e1-9d30-01cbd6117fcc\") " 
pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.938147 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-6shpm"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.961382 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llnvj\" (UniqueName: \"kubernetes.io/projected/7fd09048-e506-48e1-9d30-01cbd6117fcc-kube-api-access-llnvj\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-mbsh5\" (UID: \"7fd09048-e506-48e1-9d30-01cbd6117fcc\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.967504 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcxrm\" (UniqueName: \"kubernetes.io/projected/2ed3d718-e591-45b9-9ae7-f6ed765afa35-kube-api-access-lcxrm\") pod \"mariadb-operator-controller-manager-56bbcc9d85-2vpl9\" (UID: \"2ed3d718-e591-45b9-9ae7-f6ed765afa35\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.970180 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swmzk\" (UniqueName: \"kubernetes.io/projected/af698804-e3b6-481d-bd2f-34350bae1b8f-kube-api-access-swmzk\") pod \"manila-operator-controller-manager-6546668bfd-vlj27\" (UID: \"af698804-e3b6-481d-bd2f-34350bae1b8f\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.973848 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.985198 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-6shpm"] Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.986353 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-6shpm" Dec 01 07:07:37 crc kubenswrapper[4822]: I1201 07:07:37.996870 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-czgx9" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.005373 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.006494 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.014780 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.022949 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.023472 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-m5sx8" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.031651 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj2ph\" (UniqueName: \"kubernetes.io/projected/4a0f6236-18f0-436a-9544-d76b8d1c3a09-kube-api-access-gj2ph\") pod \"nova-operator-controller-manager-697bc559fc-49rk7\" (UID: \"4a0f6236-18f0-436a-9544-d76b8d1c3a09\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.040417 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.047292 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.048629 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.053581 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-qcdq5" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.054311 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj2ph\" (UniqueName: \"kubernetes.io/projected/4a0f6236-18f0-436a-9544-d76b8d1c3a09-kube-api-access-gj2ph\") pod \"nova-operator-controller-manager-697bc559fc-49rk7\" (UID: \"4a0f6236-18f0-436a-9544-d76b8d1c3a09\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.080289 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.098963 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.100671 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.113973 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.117376 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.118866 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.125687 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.127143 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.133564 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb4462f7ll\" (UID: \"f8f4cc9b-6768-4f0c-a82e-d4a831291fee\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.133733 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh5xt\" (UniqueName: \"kubernetes.io/projected/44bcc489-7661-42ff-b164-4bc2fea1a426-kube-api-access-hh5xt\") pod \"ovn-operator-controller-manager-b6456fdb6-dmqx6\" (UID: \"44bcc489-7661-42ff-b164-4bc2fea1a426\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.133843 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr6xb\" (UniqueName: \"kubernetes.io/projected/825c452f-e271-42a4-ba90-d16f50140303-kube-api-access-cr6xb\") pod \"octavia-operator-controller-manager-998648c74-6shpm\" (UID: \"825c452f-e271-42a4-ba90-d16f50140303\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-6shpm" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.133954 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6k7fp\" (UniqueName: \"kubernetes.io/projected/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-kube-api-access-6k7fp\") pod \"openstack-baremetal-operator-controller-manager-6698bcb4462f7ll\" (UID: \"f8f4cc9b-6768-4f0c-a82e-d4a831291fee\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.147669 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.148159 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.156944 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.169257 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-mtshp"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.171242 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-mtshp"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.171348 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mtshp" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.174113 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.179320 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.180684 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.185698 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.207091 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-25d4s" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.207241 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-rnw2n" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.207340 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-p9skp" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.207485 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-wj629" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.207519 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-42mbp" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.227627 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"] Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.228542 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.231654 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-7kktl" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.231891 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.231999 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.242449 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.242847 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb4462f7ll\" (UID: \"f8f4cc9b-6768-4f0c-a82e-d4a831291fee\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.242967 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh5xt\" (UniqueName: \"kubernetes.io/projected/44bcc489-7661-42ff-b164-4bc2fea1a426-kube-api-access-hh5xt\") pod \"ovn-operator-controller-manager-b6456fdb6-dmqx6\" (UID: \"44bcc489-7661-42ff-b164-4bc2fea1a426\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.243082 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk5c9\" (UniqueName: \"kubernetes.io/projected/3ebc517a-dfe9-4462-b92d-b381d254f028-kube-api-access-sk5c9\") pod \"swift-operator-controller-manager-5f8c65bbfc-wxp4b\" (UID: \"3ebc517a-dfe9-4462-b92d-b381d254f028\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.243194 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr6xb\" (UniqueName: \"kubernetes.io/projected/825c452f-e271-42a4-ba90-d16f50140303-kube-api-access-cr6xb\") pod \"octavia-operator-controller-manager-998648c74-6shpm\" (UID: \"825c452f-e271-42a4-ba90-d16f50140303\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-6shpm" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.243234 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert\") pod \"infra-operator-controller-manager-57548d458d-2s7q5\" (UID: \"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.243316 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6k7fp\" (UniqueName: \"kubernetes.io/projected/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-kube-api-access-6k7fp\") pod \"openstack-baremetal-operator-controller-manager-6698bcb4462f7ll\" (UID: \"f8f4cc9b-6768-4f0c-a82e-d4a831291fee\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.243435 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x99rc\" (UniqueName: \"kubernetes.io/projected/38214819-c905-4c30-8c6c-e8ea8978656f-kube-api-access-x99rc\") pod \"placement-operator-controller-manager-78f8948974-6wz2z\" (UID: \"38214819-c905-4c30-8c6c-e8ea8978656f\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z" Dec 01 07:07:38 crc kubenswrapper[4822]: E1201 07:07:38.244637 4822 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" 
not found
Dec 01 07:07:38 crc kubenswrapper[4822]: E1201 07:07:38.244690 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert podName:f8f4cc9b-6768-4f0c-a82e-d4a831291fee nodeName:}" failed. No retries permitted until 2025-12-01 07:07:38.744672275 +0000 UTC m=+1014.065479961 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" (UID: "f8f4cc9b-6768-4f0c-a82e-d4a831291fee") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 07:07:38 crc kubenswrapper[4822]: E1201 07:07:38.245226 4822 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 01 07:07:38 crc kubenswrapper[4822]: E1201 07:07:38.245290 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert podName:9f7f1540-19b0-48c0-adab-c10e8bdd0fd3 nodeName:}" failed. No retries permitted until 2025-12-01 07:07:39.245267672 +0000 UTC m=+1014.566075358 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert") pod "infra-operator-controller-manager-57548d458d-2s7q5" (UID: "9f7f1540-19b0-48c0-adab-c10e8bdd0fd3") : secret "infra-operator-webhook-server-cert" not found
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.246021 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mr8bc\" (UniqueName: \"kubernetes.io/projected/d94674cd-a8c3-4db7-acb1-2a9965fd85e0-kube-api-access-mr8bc\") pod \"test-operator-controller-manager-5854674fcc-mtshp\" (UID: \"d94674cd-a8c3-4db7-acb1-2a9965fd85e0\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-mtshp"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.246086 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhlnj\" (UniqueName: \"kubernetes.io/projected/157e4e2d-79c0-42c6-9dd5-a2669945d731-kube-api-access-mhlnj\") pod \"telemetry-operator-controller-manager-76cc84c6bb-zgl8s\" (UID: \"157e4e2d-79c0-42c6-9dd5-a2669945d731\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.251012 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"]
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.258267 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.295840 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr6xb\" (UniqueName: \"kubernetes.io/projected/825c452f-e271-42a4-ba90-d16f50140303-kube-api-access-cr6xb\") pod \"octavia-operator-controller-manager-998648c74-6shpm\" (UID: \"825c452f-e271-42a4-ba90-d16f50140303\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-6shpm"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.322085 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6k7fp\" (UniqueName: \"kubernetes.io/projected/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-kube-api-access-6k7fp\") pod \"openstack-baremetal-operator-controller-manager-6698bcb4462f7ll\" (UID: \"f8f4cc9b-6768-4f0c-a82e-d4a831291fee\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.347691 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh5xt\" (UniqueName: \"kubernetes.io/projected/44bcc489-7661-42ff-b164-4bc2fea1a426-kube-api-access-hh5xt\") pod \"ovn-operator-controller-manager-b6456fdb6-dmqx6\" (UID: \"44bcc489-7661-42ff-b164-4bc2fea1a426\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.348062 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk5c9\" (UniqueName: \"kubernetes.io/projected/3ebc517a-dfe9-4462-b92d-b381d254f028-kube-api-access-sk5c9\") pod \"swift-operator-controller-manager-5f8c65bbfc-wxp4b\" (UID: \"3ebc517a-dfe9-4462-b92d-b381d254f028\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.350297 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.350350 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bbfs\" (UniqueName: \"kubernetes.io/projected/d169afb8-577e-430e-84b1-98f34bdbec2c-kube-api-access-8bbfs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.350384 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.350450 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7mp7\" (UniqueName: \"kubernetes.io/projected/0778e4fe-6293-4282-9978-a2838469affe-kube-api-access-b7mp7\") pod \"watcher-operator-controller-manager-769dc69bc-8pmrk\" (UID: \"0778e4fe-6293-4282-9978-a2838469affe\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.350478 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x99rc\" (UniqueName: \"kubernetes.io/projected/38214819-c905-4c30-8c6c-e8ea8978656f-kube-api-access-x99rc\") pod \"placement-operator-controller-manager-78f8948974-6wz2z\" (UID: \"38214819-c905-4c30-8c6c-e8ea8978656f\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.350508 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mr8bc\" (UniqueName: \"kubernetes.io/projected/d94674cd-a8c3-4db7-acb1-2a9965fd85e0-kube-api-access-mr8bc\") pod \"test-operator-controller-manager-5854674fcc-mtshp\" (UID: \"d94674cd-a8c3-4db7-acb1-2a9965fd85e0\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-mtshp"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.350530 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhlnj\" (UniqueName: \"kubernetes.io/projected/157e4e2d-79c0-42c6-9dd5-a2669945d731-kube-api-access-mhlnj\") pod \"telemetry-operator-controller-manager-76cc84c6bb-zgl8s\" (UID: \"157e4e2d-79c0-42c6-9dd5-a2669945d731\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.383766 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr"]
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.386011 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.391112 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-gvbtd"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.391495 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x99rc\" (UniqueName: \"kubernetes.io/projected/38214819-c905-4c30-8c6c-e8ea8978656f-kube-api-access-x99rc\") pod \"placement-operator-controller-manager-78f8948974-6wz2z\" (UID: \"38214819-c905-4c30-8c6c-e8ea8978656f\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.394406 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk5c9\" (UniqueName: \"kubernetes.io/projected/3ebc517a-dfe9-4462-b92d-b381d254f028-kube-api-access-sk5c9\") pod \"swift-operator-controller-manager-5f8c65bbfc-wxp4b\" (UID: \"3ebc517a-dfe9-4462-b92d-b381d254f028\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.396618 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhlnj\" (UniqueName: \"kubernetes.io/projected/157e4e2d-79c0-42c6-9dd5-a2669945d731-kube-api-access-mhlnj\") pod \"telemetry-operator-controller-manager-76cc84c6bb-zgl8s\" (UID: \"157e4e2d-79c0-42c6-9dd5-a2669945d731\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.417562 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mr8bc\" (UniqueName: \"kubernetes.io/projected/d94674cd-a8c3-4db7-acb1-2a9965fd85e0-kube-api-access-mr8bc\") pod \"test-operator-controller-manager-5854674fcc-mtshp\" (UID: \"d94674cd-a8c3-4db7-acb1-2a9965fd85e0\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-mtshp"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.439451 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mtshp"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.488649 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr"]
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.505693 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7mp7\" (UniqueName: \"kubernetes.io/projected/0778e4fe-6293-4282-9978-a2838469affe-kube-api-access-b7mp7\") pod \"watcher-operator-controller-manager-769dc69bc-8pmrk\" (UID: \"0778e4fe-6293-4282-9978-a2838469affe\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.505782 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9db7t\" (UniqueName: \"kubernetes.io/projected/1e38b2a2-19d5-4375-8c74-c46ca488a520-kube-api-access-9db7t\") pod \"rabbitmq-cluster-operator-manager-668c99d594-wllwr\" (UID: \"1e38b2a2-19d5-4375-8c74-c46ca488a520\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.505914 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.505955 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bbfs\" (UniqueName: \"kubernetes.io/projected/d169afb8-577e-430e-84b1-98f34bdbec2c-kube-api-access-8bbfs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.506001 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:38 crc kubenswrapper[4822]: E1201 07:07:38.506179 4822 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 07:07:38 crc kubenswrapper[4822]: E1201 07:07:38.506241 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs podName:d169afb8-577e-430e-84b1-98f34bdbec2c nodeName:}" failed. No retries permitted until 2025-12-01 07:07:39.006222879 +0000 UTC m=+1014.327030555 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-kmpbd" (UID: "d169afb8-577e-430e-84b1-98f34bdbec2c") : secret "metrics-server-cert" not found
Dec 01 07:07:38 crc kubenswrapper[4822]: E1201 07:07:38.506584 4822 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 07:07:38 crc kubenswrapper[4822]: E1201 07:07:38.506616 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs podName:d169afb8-577e-430e-84b1-98f34bdbec2c nodeName:}" failed. No retries permitted until 2025-12-01 07:07:39.00660719 +0000 UTC m=+1014.327414876 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-kmpbd" (UID: "d169afb8-577e-430e-84b1-98f34bdbec2c") : secret "webhook-server-cert" not found
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.507218 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-6shpm"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.533238 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7mp7\" (UniqueName: \"kubernetes.io/projected/0778e4fe-6293-4282-9978-a2838469affe-kube-api-access-b7mp7\") pod \"watcher-operator-controller-manager-769dc69bc-8pmrk\" (UID: \"0778e4fe-6293-4282-9978-a2838469affe\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.540169 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bbfs\" (UniqueName: \"kubernetes.io/projected/d169afb8-577e-430e-84b1-98f34bdbec2c-kube-api-access-8bbfs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.560578 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.566628 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.568341 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.714686 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.720109 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b"
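The MountVolume.SetUp failures above are all of one kind: the pod spec references a Secret (a webhook or metrics serving certificate) that does not exist yet in the openstack-operators namespace, so the kubelet fails the mount and schedules a retry; the mounts succeed on their own once the component that issues those certificates has created the Secrets. A minimal sketch, outside this log, of probing for the Secrets named in these errors with the official Python kubernetes client (the secret names are taken from the errors above; the kubeconfig context is an assumption):

```python
# Probe for the Secrets the kubelet reports as missing above.
from kubernetes import client, config

NAMESPACE = "openstack-operators"
SECRETS = [
    "openstack-baremetal-operator-webhook-server-cert",
    "infra-operator-webhook-server-cert",
    "metrics-server-cert",
    "webhook-server-cert",
]

config.load_kube_config()  # assumes a kubeconfig pointing at this cluster
v1 = client.CoreV1Api()

for name in SECRETS:
    try:
        v1.read_namespaced_secret(name, NAMESPACE)
        print(f"{name}: present")
    except client.exceptions.ApiException as exc:
        # A 404 here corresponds to the kubelet's 'secret ... not found'.
        print(f"{name}: missing (HTTP {exc.status})")
```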
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.721035 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9db7t\" (UniqueName: \"kubernetes.io/projected/1e38b2a2-19d5-4375-8c74-c46ca488a520-kube-api-access-9db7t\") pod \"rabbitmq-cluster-operator-manager-668c99d594-wllwr\" (UID: \"1e38b2a2-19d5-4375-8c74-c46ca488a520\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.770454 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9db7t\" (UniqueName: \"kubernetes.io/projected/1e38b2a2-19d5-4375-8c74-c46ca488a520-kube-api-access-9db7t\") pod \"rabbitmq-cluster-operator-manager-668c99d594-wllwr\" (UID: \"1e38b2a2-19d5-4375-8c74-c46ca488a520\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr"
Dec 01 07:07:38 crc kubenswrapper[4822]: I1201 07:07:38.843846 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb4462f7ll\" (UID: \"f8f4cc9b-6768-4f0c-a82e-d4a831291fee\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll"
Dec 01 07:07:38 crc kubenswrapper[4822]: E1201 07:07:38.844057 4822 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 07:07:38 crc kubenswrapper[4822]: E1201 07:07:38.844131 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert podName:f8f4cc9b-6768-4f0c-a82e-d4a831291fee nodeName:}" failed. No retries permitted until 2025-12-01 07:07:39.844108382 +0000 UTC m=+1015.164916068 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" (UID: "f8f4cc9b-6768-4f0c-a82e-d4a831291fee") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 07:07:39 crc kubenswrapper[4822]: I1201 07:07:39.047499 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:39 crc kubenswrapper[4822]: I1201 07:07:39.047602 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:39 crc kubenswrapper[4822]: E1201 07:07:39.047796 4822 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 07:07:39 crc kubenswrapper[4822]: E1201 07:07:39.047873 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs podName:d169afb8-577e-430e-84b1-98f34bdbec2c nodeName:}" failed. No retries permitted until 2025-12-01 07:07:40.047847475 +0000 UTC m=+1015.368655161 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-kmpbd" (UID: "d169afb8-577e-430e-84b1-98f34bdbec2c") : secret "metrics-server-cert" not found
Dec 01 07:07:39 crc kubenswrapper[4822]: E1201 07:07:39.048773 4822 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 07:07:39 crc kubenswrapper[4822]: E1201 07:07:39.048881 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs podName:d169afb8-577e-430e-84b1-98f34bdbec2c nodeName:}" failed. No retries permitted until 2025-12-01 07:07:40.048848893 +0000 UTC m=+1015.369656579 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-kmpbd" (UID: "d169afb8-577e-430e-84b1-98f34bdbec2c") : secret "webhook-server-cert" not found
Dec 01 07:07:39 crc kubenswrapper[4822]: I1201 07:07:39.117876 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v"]
Dec 01 07:07:39 crc kubenswrapper[4822]: I1201 07:07:39.124153 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr"
Dec 01 07:07:39 crc kubenswrapper[4822]: I1201 07:07:39.250912 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert\") pod \"infra-operator-controller-manager-57548d458d-2s7q5\" (UID: \"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5"
Dec 01 07:07:39 crc kubenswrapper[4822]: E1201 07:07:39.251523 4822 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 01 07:07:39 crc kubenswrapper[4822]: E1201 07:07:39.251597 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert podName:9f7f1540-19b0-48c0-adab-c10e8bdd0fd3 nodeName:}" failed. No retries permitted until 2025-12-01 07:07:41.251574717 +0000 UTC m=+1016.572382403 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert") pod "infra-operator-controller-manager-57548d458d-2s7q5" (UID: "9f7f1540-19b0-48c0-adab-c10e8bdd0fd3") : secret "infra-operator-webhook-server-cert" not found
Dec 01 07:07:39 crc kubenswrapper[4822]: I1201 07:07:39.785199 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v" event={"ID":"43d47ccc-afef-42e7-bc18-df3be5e2b4e0","Type":"ContainerStarted","Data":"8d42ba78aef16856a497ba1f77ed283f9f2ebeaa1289357c0f18357972dd9b01"}
Dec 01 07:07:39 crc kubenswrapper[4822]: I1201 07:07:39.825304 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff"]
Dec 01 07:07:39 crc kubenswrapper[4822]: I1201 07:07:39.857849 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss"]
Dec 01 07:07:39 crc kubenswrapper[4822]: I1201 07:07:39.876141 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb4462f7ll\" (UID: \"f8f4cc9b-6768-4f0c-a82e-d4a831291fee\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll"
Dec 01 07:07:39 crc kubenswrapper[4822]: E1201 07:07:39.876377 4822 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 07:07:39 crc kubenswrapper[4822]: E1201 07:07:39.876456 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert podName:f8f4cc9b-6768-4f0c-a82e-d4a831291fee nodeName:}" failed. No retries permitted until 2025-12-01 07:07:41.876408596 +0000 UTC m=+1017.197216282 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" (UID: "f8f4cc9b-6768-4f0c-a82e-d4a831291fee") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.083896 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.083971 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.084277 4822 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.084370 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs podName:d169afb8-577e-430e-84b1-98f34bdbec2c nodeName:}" failed. No retries permitted until 2025-12-01 07:07:42.084349577 +0000 UTC m=+1017.405157263 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-kmpbd" (UID: "d169afb8-577e-430e-84b1-98f34bdbec2c") : secret "metrics-server-cert" not found
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.084922 4822 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.084968 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs podName:d169afb8-577e-430e-84b1-98f34bdbec2c nodeName:}" failed. No retries permitted until 2025-12-01 07:07:42.084960244 +0000 UTC m=+1017.405767930 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-kmpbd" (UID: "d169afb8-577e-430e-84b1-98f34bdbec2c") : secret "webhook-server-cert" not found
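Note the durationBeforeRetry progression across these repeats: 500ms, then 1s, then 2s (and 4s further down). That is the volume manager's per-operation exponential backoff in nestedpendingoperations.go. A small sketch of the schedule as observed here; the doubling factor is visible in the log itself, while the cap is an assumption (recent kubelets stop growing the delay at roughly 2m2s):

```python
# Reproduce the retry cadence seen in 'durationBeforeRetry' above:
# 500ms -> 1s -> 2s -> 4s -> ... (doubling, up to a cap).
from datetime import timedelta

def backoff_schedule(initial=timedelta(milliseconds=500),
                     factor=2,
                     cap=timedelta(minutes=2, seconds=2),  # assumed cap
                     n=8):
    delay = initial
    for _ in range(n):
        yield delay
        delay = min(delay * factor, cap)

print([str(d) for d in backoff_schedule()])
# ['0:00:00.500000', '0:00:01', '0:00:02', '0:00:04', ...]
```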
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.101648 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6"]
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.123702 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck"]
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.139418 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27"]
Dec 01 07:07:40 crc kubenswrapper[4822]: W1201 07:07:40.142925 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf698804_e3b6_481d_bd2f_34350bae1b8f.slice/crio-6d5eb4eb231b4099ea6b59b0910a3d636eb8c6c77fe41c5268fbf8e7a831f62a WatchSource:0}: Error finding container 6d5eb4eb231b4099ea6b59b0910a3d636eb8c6c77fe41c5268fbf8e7a831f62a: Status 404 returned error can't find the container with id 6d5eb4eb231b4099ea6b59b0910a3d636eb8c6c77fe41c5268fbf8e7a831f62a
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.155562 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5"]
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.162050 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4"]
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.501249 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk"]
Dec 01 07:07:40 crc kubenswrapper[4822]: W1201 07:07:40.510591 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0778e4fe_6293_4282_9978_a2838469affe.slice/crio-48d5ac37650b650434500a06650c0721eba22fd31eb2ece0a14daf6cbd9061dd WatchSource:0}: Error finding container 48d5ac37650b650434500a06650c0721eba22fd31eb2ece0a14daf6cbd9061dd: Status 404 returned error can't find the container with id 48d5ac37650b650434500a06650c0721eba22fd31eb2ece0a14daf6cbd9061dd
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.522658 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-mtshp"]
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.530731 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6"]
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.576584 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z"]
Dec 01 07:07:40 crc kubenswrapper[4822]: W1201 07:07:40.593274 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ebc517a_dfe9_4462_b92d_b381d254f028.slice/crio-0b9710adf517ebab122f587895794573ebc684dd650d28d93bd925435d9f6d45 WatchSource:0}: Error finding container 0b9710adf517ebab122f587895794573ebc684dd650d28d93bd925435d9f6d45: Status 404 returned error can't find the container with id 0b9710adf517ebab122f587895794573ebc684dd650d28d93bd925435d9f6d45
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.593515 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b"]
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.601520 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zvwt7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-t2t9n_openstack-operators(b6e7914c-28b1-4241-9db9-ebecda9ede7a): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.606703 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n"]
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.608916 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zvwt7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-t2t9n_openstack-operators(b6e7914c-28b1-4241-9db9-ebecda9ede7a): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.610233 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" podUID="b6e7914c-28b1-4241-9db9-ebecda9ede7a"
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.610863 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lcxrm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-2vpl9_openstack-operators(2ed3d718-e591-45b9-9ae7-f6ed765afa35): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.613097 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lcxrm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-2vpl9_openstack-operators(2ed3d718-e591-45b9-9ae7-f6ed765afa35): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.614433 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" podUID="2ed3d718-e591-45b9-9ae7-f6ed765afa35"
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.615915 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-6shpm"]
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.623146 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9"]
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.631920 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s"]
Dec 01 07:07:40 crc kubenswrapper[4822]: W1201 07:07:40.632919 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea009d0a_d9c2_4265_96bf_6153ce222eef.slice/crio-ca7463cad5d5282169897afde8355a0514dcb0532aa53f5f7ac93ec30d8f3dee WatchSource:0}: Error finding container ca7463cad5d5282169897afde8355a0514dcb0532aa53f5f7ac93ec30d8f3dee: Status 404 returned error can't find the container with id ca7463cad5d5282169897afde8355a0514dcb0532aa53f5f7ac93ec30d8f3dee
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.641107 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:986861e5a0a9954f63581d9d55a30f8057883cefea489415d76257774526eea3,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5qbsp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-546d4bdf48-m8rnk_openstack-operators(ea009d0a-d9c2-4265-96bf-6153ce222eef): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.641201 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk"]
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.641168 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gj2ph,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-49rk7_openstack-operators(4a0f6236-18f0-436a-9544-d76b8d1c3a09): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.645042 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5qbsp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-546d4bdf48-m8rnk_openstack-operators(ea009d0a-d9c2-4265-96bf-6153ce222eef): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.645146 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gj2ph,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-49rk7_openstack-operators(4a0f6236-18f0-436a-9544-d76b8d1c3a09): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.646465 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" podUID="4a0f6236-18f0-436a-9544-d76b8d1c3a09"
Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.646792 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" podUID="ea009d0a-d9c2-4265-96bf-6153ce222eef"
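The "ErrImagePull: pull QPS exceeded" burst above is not a registry failure: roughly twenty operator pods were started within the same second, and the kubelet rate-limits image pulls with a token bucket (the registryPullQPS / registryBurst settings in KubeletConfiguration; 5 QPS with a burst of 10 are the commonly documented defaults, not values read from this node). A sketch of that throttling behavior under those assumed settings:

```python
# Token-bucket sketch of kubelet-style image pull throttling. With a
# burst of 10 and ~20 near-simultaneous container starts, the later
# pulls are rejected immediately -- surfaced as 'pull QPS exceeded'.
import time

class PullLimiter:
    def __init__(self, qps=5.0, burst=10):  # assumed kubelet defaults
        self.qps, self.burst = qps, float(burst)
        self.tokens = self.burst
        self.last = time.monotonic()

    def try_acquire(self):
        now = time.monotonic()
        # Refill tokens at `qps` per second, capped at `burst`.
        self.tokens = min(self.burst, self.tokens + (now - self.last) * self.qps)
        self.last = now
        if self.tokens >= 1.0:
            self.tokens -= 1.0
            return True
        return False

limiter = PullLimiter()
rejected = sum(not limiter.try_acquire() for _ in range(20))
print(f"{rejected} of 20 immediate pulls rejected")
```

The failures are transient: the kubelet retries the pulls under its image backoff, which is what turns into the ImagePullBackOff entries that follow.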
pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" podUID="ea009d0a-d9c2-4265-96bf-6153ce222eef" Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.651263 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7"] Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.657195 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mhlnj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-zgl8s_openstack-operators(157e4e2d-79c0-42c6-9dd5-a2669945d731): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.659081 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr"] Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.663960 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9db7t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-wllwr_openstack-operators(1e38b2a2-19d5-4375-8c74-c46ca488a520): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.664080 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mhlnj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-zgl8s_openstack-operators(157e4e2d-79c0-42c6-9dd5-a2669945d731): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.665196 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s" podUID="157e4e2d-79c0-42c6-9dd5-a2669945d731" Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.665296 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr" podUID="1e38b2a2-19d5-4375-8c74-c46ca488a520" Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.800775 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4" event={"ID":"85f27b60-f694-4768-b55e-bb816ed4594b","Type":"ContainerStarted","Data":"f34542fe9d73187171202b053ce970aa876e2b354db490b861631d5c558d4326"} Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.802866 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27" event={"ID":"af698804-e3b6-481d-bd2f-34350bae1b8f","Type":"ContainerStarted","Data":"6d5eb4eb231b4099ea6b59b0910a3d636eb8c6c77fe41c5268fbf8e7a831f62a"} Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.804232 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s" event={"ID":"157e4e2d-79c0-42c6-9dd5-a2669945d731","Type":"ContainerStarted","Data":"c36f8dedc0100df4f3780ae1212193395ad304749a5a4aaec6c691d3d3509a18"} Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.806831 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s" podUID="157e4e2d-79c0-42c6-9dd5-a2669945d731" Dec 01 07:07:40 crc 
kubenswrapper[4822]: I1201 07:07:40.806929 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" event={"ID":"2ed3d718-e591-45b9-9ae7-f6ed765afa35","Type":"ContainerStarted","Data":"d5629c2edebe8504022b1c68a9ae3901070b24c64b3b0d00e15498338fd430e8"} Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.808368 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" podUID="2ed3d718-e591-45b9-9ae7-f6ed765afa35" Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.809817 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" event={"ID":"ea009d0a-d9c2-4265-96bf-6153ce222eef","Type":"ContainerStarted","Data":"ca7463cad5d5282169897afde8355a0514dcb0532aa53f5f7ac93ec30d8f3dee"} Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.812105 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:986861e5a0a9954f63581d9d55a30f8057883cefea489415d76257774526eea3\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" podUID="ea009d0a-d9c2-4265-96bf-6153ce222eef" Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.812610 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-6shpm" event={"ID":"825c452f-e271-42a4-ba90-d16f50140303","Type":"ContainerStarted","Data":"2e2012e78d2422e60d64b8d144185e23b9e173f795b574d87dcb703abb978cbd"} Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.813800 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff" event={"ID":"fd0abaec-7f45-438f-843e-1a1dd2cbf841","Type":"ContainerStarted","Data":"500237739bf4f95ccf05ee98e3cc6bb2e8a37b4229c2567c483e55472e377a0e"} Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.817841 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" event={"ID":"4a0f6236-18f0-436a-9544-d76b8d1c3a09","Type":"ContainerStarted","Data":"eb9a9076a37e7849a151c9cb9e95bfe9c05948145d0ba1d81b90e1b39d857514"} Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.821826 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" 
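Once a pull has failed, subsequent sync attempts report ImagePullBackOff until the kubelet's image backoff window expires, so the same handful of pods dominate the rest of this excerpt. A small self-contained helper for triaging a dump like this one (the kubelet.log filename is a placeholder; it counts pattern matches per pod, which is good enough to see who is affected):

```python
# Tally which pods report ErrImagePull / ImagePullBackOff in a
# plain-text kubelet journal dump such as this file.
import re
from collections import Counter

PATTERN = re.compile(r'(ErrImagePull|ImagePullBackOff).*?pod="([^"]+)"')

counts = Counter()
with open("kubelet.log", encoding="utf-8", errors="replace") as log:
    for line in log:
        for reason, pod in PATTERN.findall(line):
            counts[(pod, reason)] += 1

for (pod, reason), n in counts.most_common(10):
    print(f"{n:4d}  {reason:17s}  {pod}")
```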
pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" podUID="4a0f6236-18f0-436a-9544-d76b8d1c3a09" Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.822775 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z" event={"ID":"38214819-c905-4c30-8c6c-e8ea8978656f","Type":"ContainerStarted","Data":"6a1a663b13a4d7a66583d222e04c8f9aadebb2668a8b18e6f60c51f0cbc7cb26"} Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.826457 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk" event={"ID":"0778e4fe-6293-4282-9978-a2838469affe","Type":"ContainerStarted","Data":"48d5ac37650b650434500a06650c0721eba22fd31eb2ece0a14daf6cbd9061dd"} Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.834274 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss" event={"ID":"adf58c74-4460-490a-97bb-a2d60a6efffa","Type":"ContainerStarted","Data":"ac6a613795febf94628b3d8e8b7771a126600452635d77008141f68020bd6cf3"} Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.836468 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck" event={"ID":"031f17da-46a5-4904-b32c-968dbd5959c1","Type":"ContainerStarted","Data":"5ce71cfb84a15498a57c96ca0eec32ceafd917780d19155060b7d9d62c22227d"} Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.842961 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6" event={"ID":"44bcc489-7661-42ff-b164-4bc2fea1a426","Type":"ContainerStarted","Data":"b912aca5b5a5e1dc284076581ccafcc9c1f7984ba02e0e31e44dcdd97e7d7635"} Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.861457 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mtshp" event={"ID":"d94674cd-a8c3-4db7-acb1-2a9965fd85e0","Type":"ContainerStarted","Data":"acb1811d739d1a56ad994ac53b0b14e4fb996c1d871da5ed388c8bfcc1471444"} Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.869233 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr" event={"ID":"1e38b2a2-19d5-4375-8c74-c46ca488a520","Type":"ContainerStarted","Data":"f800495683639b93e4f8769dd127b6bda536b8d45f7a32e51e787f335c6813e5"} Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.871840 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr" podUID="1e38b2a2-19d5-4375-8c74-c46ca488a520" Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.873815 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5" event={"ID":"7fd09048-e506-48e1-9d30-01cbd6117fcc","Type":"ContainerStarted","Data":"e2534d6a8eb7e46d068216bdd28f6c73cee34094b55dfed2880d3287b37fbee7"} Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.877005 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" event={"ID":"b6e7914c-28b1-4241-9db9-ebecda9ede7a","Type":"ContainerStarted","Data":"77fb86edee431f3df6c8b051bb62fe6a006c56522fa1552212555bf07f269bcf"} Dec 01 07:07:40 crc kubenswrapper[4822]: E1201 07:07:40.881147 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" podUID="b6e7914c-28b1-4241-9db9-ebecda9ede7a" Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.881855 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6" event={"ID":"ee5199ce-b0f1-4753-a317-8d4b95bca11b","Type":"ContainerStarted","Data":"1d2d80ee016948edbd7345201cc339b2e4332f45304bd7acfae1a94d7af76c54"} Dec 01 07:07:40 crc kubenswrapper[4822]: I1201 07:07:40.889200 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b" event={"ID":"3ebc517a-dfe9-4462-b92d-b381d254f028","Type":"ContainerStarted","Data":"0b9710adf517ebab122f587895794573ebc684dd650d28d93bd925435d9f6d45"} Dec 01 07:07:41 crc kubenswrapper[4822]: I1201 07:07:41.319427 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert\") pod \"infra-operator-controller-manager-57548d458d-2s7q5\" (UID: \"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" Dec 01 07:07:41 crc kubenswrapper[4822]: E1201 07:07:41.319604 4822 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 07:07:41 crc kubenswrapper[4822]: E1201 07:07:41.319695 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert podName:9f7f1540-19b0-48c0-adab-c10e8bdd0fd3 nodeName:}" failed. No retries permitted until 2025-12-01 07:07:45.319680712 +0000 UTC m=+1020.640488388 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert") pod "infra-operator-controller-manager-57548d458d-2s7q5" (UID: "9f7f1540-19b0-48c0-adab-c10e8bdd0fd3") : secret "infra-operator-webhook-server-cert" not found Dec 01 07:07:41 crc kubenswrapper[4822]: E1201 07:07:41.901809 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr" podUID="1e38b2a2-19d5-4375-8c74-c46ca488a520" Dec 01 07:07:41 crc kubenswrapper[4822]: E1201 07:07:41.902361 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" podUID="b6e7914c-28b1-4241-9db9-ebecda9ede7a" Dec 01 07:07:41 crc kubenswrapper[4822]: E1201 07:07:41.904281 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" podUID="4a0f6236-18f0-436a-9544-d76b8d1c3a09" Dec 01 07:07:41 crc kubenswrapper[4822]: E1201 07:07:41.905036 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:986861e5a0a9954f63581d9d55a30f8057883cefea489415d76257774526eea3\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" podUID="ea009d0a-d9c2-4265-96bf-6153ce222eef" Dec 01 07:07:41 crc kubenswrapper[4822]: E1201 07:07:41.905731 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" podUID="2ed3d718-e591-45b9-9ae7-f6ed765afa35" Dec 01 07:07:41 crc kubenswrapper[4822]: E1201 07:07:41.905886 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling 
image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s" podUID="157e4e2d-79c0-42c6-9dd5-a2669945d731" Dec 01 07:07:41 crc kubenswrapper[4822]: I1201 07:07:41.940344 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb4462f7ll\" (UID: \"f8f4cc9b-6768-4f0c-a82e-d4a831291fee\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" Dec 01 07:07:41 crc kubenswrapper[4822]: E1201 07:07:41.940526 4822 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 07:07:41 crc kubenswrapper[4822]: E1201 07:07:41.940586 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert podName:f8f4cc9b-6768-4f0c-a82e-d4a831291fee nodeName:}" failed. No retries permitted until 2025-12-01 07:07:45.940572371 +0000 UTC m=+1021.261380057 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" (UID: "f8f4cc9b-6768-4f0c-a82e-d4a831291fee") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 07:07:42 crc kubenswrapper[4822]: I1201 07:07:42.143416 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd" Dec 01 07:07:42 crc kubenswrapper[4822]: I1201 07:07:42.143486 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd" Dec 01 07:07:42 crc kubenswrapper[4822]: E1201 07:07:42.143730 4822 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 01 07:07:42 crc kubenswrapper[4822]: E1201 07:07:42.143789 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs podName:d169afb8-577e-430e-84b1-98f34bdbec2c nodeName:}" failed. No retries permitted until 2025-12-01 07:07:46.143772158 +0000 UTC m=+1021.464579844 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-kmpbd" (UID: "d169afb8-577e-430e-84b1-98f34bdbec2c") : secret "metrics-server-cert" not found Dec 01 07:07:42 crc kubenswrapper[4822]: E1201 07:07:42.144125 4822 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 01 07:07:42 crc kubenswrapper[4822]: E1201 07:07:42.144259 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs podName:d169afb8-577e-430e-84b1-98f34bdbec2c nodeName:}" failed. No retries permitted until 2025-12-01 07:07:46.144232251 +0000 UTC m=+1021.465039937 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-kmpbd" (UID: "d169afb8-577e-430e-84b1-98f34bdbec2c") : secret "webhook-server-cert" not found Dec 01 07:07:45 crc kubenswrapper[4822]: I1201 07:07:45.398508 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert\") pod \"infra-operator-controller-manager-57548d458d-2s7q5\" (UID: \"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" Dec 01 07:07:45 crc kubenswrapper[4822]: E1201 07:07:45.398778 4822 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 07:07:45 crc kubenswrapper[4822]: E1201 07:07:45.399715 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert podName:9f7f1540-19b0-48c0-adab-c10e8bdd0fd3 nodeName:}" failed. No retries permitted until 2025-12-01 07:07:53.399688328 +0000 UTC m=+1028.720496204 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert") pod "infra-operator-controller-manager-57548d458d-2s7q5" (UID: "9f7f1540-19b0-48c0-adab-c10e8bdd0fd3") : secret "infra-operator-webhook-server-cert" not found Dec 01 07:07:46 crc kubenswrapper[4822]: I1201 07:07:46.009518 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb4462f7ll\" (UID: \"f8f4cc9b-6768-4f0c-a82e-d4a831291fee\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" Dec 01 07:07:46 crc kubenswrapper[4822]: E1201 07:07:46.009747 4822 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 07:07:46 crc kubenswrapper[4822]: E1201 07:07:46.009812 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert podName:f8f4cc9b-6768-4f0c-a82e-d4a831291fee nodeName:}" failed. No retries permitted until 2025-12-01 07:07:54.009795674 +0000 UTC m=+1029.330603360 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" (UID: "f8f4cc9b-6768-4f0c-a82e-d4a831291fee") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 07:07:46 crc kubenswrapper[4822]: I1201 07:07:46.211820 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd" Dec 01 07:07:46 crc kubenswrapper[4822]: I1201 07:07:46.211896 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd" Dec 01 07:07:46 crc kubenswrapper[4822]: E1201 07:07:46.211975 4822 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 01 07:07:46 crc kubenswrapper[4822]: E1201 07:07:46.212043 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs podName:d169afb8-577e-430e-84b1-98f34bdbec2c nodeName:}" failed. No retries permitted until 2025-12-01 07:07:54.212025904 +0000 UTC m=+1029.532833590 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-kmpbd" (UID: "d169afb8-577e-430e-84b1-98f34bdbec2c") : secret "webhook-server-cert" not found Dec 01 07:07:46 crc kubenswrapper[4822]: E1201 07:07:46.212054 4822 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 01 07:07:46 crc kubenswrapper[4822]: E1201 07:07:46.212142 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs podName:d169afb8-577e-430e-84b1-98f34bdbec2c nodeName:}" failed. No retries permitted until 2025-12-01 07:07:54.212121387 +0000 UTC m=+1029.532929093 (durationBeforeRetry 8s). 
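Every mount failure above is the same story: the pod references a TLS secret (infra-operator-webhook-server-cert, openstack-baremetal-operator-webhook-server-cert, webhook-server-cert, metrics-server-cert) that does not exist yet, and the kubelet requeues the MountVolume.SetUp operation with exponential backoff, visible as durationBeforeRetry 4s, then 8s, then 16s. A sketch of that schedule; the doubling factor is taken straight from the log, while the cap is an assumption (the kubelet bounds this backoff, commonly at around two minutes):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Mount retry schedule as seen in the log: each failed
	// MountVolume.SetUp doubles durationBeforeRetry (4s, 8s, 16s, ...)
	// until the referenced secret finally exists.
	const backoffCap = 2 * time.Minute // assumed cap, not read from the log
	retry := 4 * time.Second
	for attempt := 1; attempt <= 6; attempt++ {
		fmt.Printf("attempt %d: durationBeforeRetry %s\n", attempt, retry)
		retry *= 2
		if retry > backoffCap {
			retry = backoffCap
		}
	}
}
```

Whichever component owns these certs (not visible in this log, but typically the webhook/cert bootstrap of the operator bundle) publishes the secrets a few seconds later, and the next retry succeeds, which is exactly what the "MountVolume.SetUp succeeded" records at 07:07:53 through 07:08:10 show.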
Dec 01 07:07:53 crc kubenswrapper[4822]: I1201 07:07:53.466661 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert\") pod \"infra-operator-controller-manager-57548d458d-2s7q5\" (UID: \"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5"
Dec 01 07:07:53 crc kubenswrapper[4822]: I1201 07:07:53.474952 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f7f1540-19b0-48c0-adab-c10e8bdd0fd3-cert\") pod \"infra-operator-controller-manager-57548d458d-2s7q5\" (UID: \"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5"
Dec 01 07:07:53 crc kubenswrapper[4822]: I1201 07:07:53.754894 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-9cn6w"
Dec 01 07:07:53 crc kubenswrapper[4822]: I1201 07:07:53.763112 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5"
Dec 01 07:07:54 crc kubenswrapper[4822]: I1201 07:07:54.073932 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb4462f7ll\" (UID: \"f8f4cc9b-6768-4f0c-a82e-d4a831291fee\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll"
Dec 01 07:07:54 crc kubenswrapper[4822]: I1201 07:07:54.080583 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f8f4cc9b-6768-4f0c-a82e-d4a831291fee-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb4462f7ll\" (UID: \"f8f4cc9b-6768-4f0c-a82e-d4a831291fee\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll"
Dec 01 07:07:54 crc kubenswrapper[4822]: I1201 07:07:54.133818 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-m5sx8"
Dec 01 07:07:54 crc kubenswrapper[4822]: I1201 07:07:54.141758 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll"
Dec 01 07:07:54 crc kubenswrapper[4822]: E1201 07:07:54.199773 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:ecf7be921850bdc04697ed1b332bab39ad2a64e4e45c2a445c04f9bae6ac61b5"
Dec 01 07:07:54 crc kubenswrapper[4822]: E1201 07:07:54.199973 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:ecf7be921850bdc04697ed1b332bab39ad2a64e4e45c2a445c04f9bae6ac61b5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-swmzk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-6546668bfd-vlj27_openstack-operators(af698804-e3b6-481d-bd2f-34350bae1b8f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 01 07:07:54 crc kubenswrapper[4822]: I1201 07:07:54.277455 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:54 crc kubenswrapper[4822]: I1201 07:07:54.277535 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
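The &Container{...} dumps above also document how these operators wire their probes: each manager runs with --health-probe-bind-address=:8081 and HTTP liveness/readiness checks on /healthz and /readyz. That is the conventional controller-runtime layout; a minimal sketch of a manager with the same wiring (a generic example under that assumption, not these operators' actual main.go):

```go
package main

import (
	"os"

	ctrl "sigs.k8s.io/controller-runtime"
	"sigs.k8s.io/controller-runtime/pkg/healthz"
)

func main() {
	// Health probe server on :8081, matching the probe spec in the
	// Container dump above (/healthz for liveness, /readyz for readiness).
	mgr, err := ctrl.NewManager(ctrl.GetConfigOrDie(), ctrl.Options{
		HealthProbeBindAddress: ":8081",
	})
	if err != nil {
		os.Exit(1)
	}
	if err := mgr.AddHealthzCheck("healthz", healthz.Ping); err != nil {
		os.Exit(1)
	}
	if err := mgr.AddReadyzCheck("readyz", healthz.Ping); err != nil {
		os.Exit(1)
	}
	if err := mgr.Start(ctrl.SetupSignalHandler()); err != nil {
		os.Exit(1)
	}
}
```

The SyncLoop (probe) readiness transitions later in the log (07:08:19) are these endpoints starting to answer once the containers finally run.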
Dec 01 07:07:54 crc kubenswrapper[4822]: E1201 07:07:54.277693 4822 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 07:07:54 crc kubenswrapper[4822]: E1201 07:07:54.277785 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs podName:d169afb8-577e-430e-84b1-98f34bdbec2c nodeName:}" failed. No retries permitted until 2025-12-01 07:08:10.277762902 +0000 UTC m=+1045.598570588 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-kmpbd" (UID: "d169afb8-577e-430e-84b1-98f34bdbec2c") : secret "webhook-server-cert" not found
Dec 01 07:07:54 crc kubenswrapper[4822]: I1201 07:07:54.283380 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:07:55 crc kubenswrapper[4822]: E1201 07:07:55.006186 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168"
Dec 01 07:07:55 crc kubenswrapper[4822]: E1201 07:07:55.006543 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cr6xb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-6shpm_openstack-operators(825c452f-e271-42a4-ba90-d16f50140303): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 01 07:07:55 crc kubenswrapper[4822]: E1201 07:07:55.737877 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429"
Dec 01 07:07:55 crc kubenswrapper[4822]: E1201 07:07:55.738054 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rvxq5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-275w4_openstack-operators(85f27b60-f694-4768-b55e-bb816ed4594b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 01 07:07:56 crc kubenswrapper[4822]: E1201 07:07:56.315774 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94"
Dec 01 07:07:56 crc kubenswrapper[4822]: E1201 07:07:56.317246 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mr8bc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-mtshp_openstack-operators(d94674cd-a8c3-4db7-acb1-2a9965fd85e0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 01 07:07:56 crc kubenswrapper[4822]: E1201 07:07:56.906496 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59"
Dec 01 07:07:56 crc kubenswrapper[4822]: E1201 07:07:56.906714 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hh5xt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-dmqx6_openstack-operators(44bcc489-7661-42ff-b164-4bc2fea1a426): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 01 07:07:57 crc kubenswrapper[4822]: E1201 07:07:57.580724 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d"
Dec 01 07:07:57 crc kubenswrapper[4822]: E1201 07:07:57.580921 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sk5c9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-wxp4b_openstack-operators(3ebc517a-dfe9-4462-b92d-b381d254f028): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 01 07:08:01 crc kubenswrapper[4822]: E1201 07:08:01.308162 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:440cde33d3a2a0c545cd1c110a3634eb85544370f448865b97a13c38034b0172"
Dec 01 07:08:01 crc kubenswrapper[4822]: E1201 07:08:01.308627 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:440cde33d3a2a0c545cd1c110a3634eb85544370f448865b97a13c38034b0172,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tsf78,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-668d9c48b9-fkvck_openstack-operators(031f17da-46a5-4904-b32c-968dbd5959c1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
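The repeated "rpc error: code = Canceled desc = copying config: context canceled" entries are pulls canceled mid-flight on the CRI side; the exact trigger is not visible in this log (the kubelet cancels a pull's context when it gives up on or supersedes the request), and the images do land on later retries, as the ContainerStarted events from 07:08:07 onward show. To watch the same ErrImagePull/ImagePullBackOff churn from the API side instead of the node log, one can list the pod's events; a sketch with client-go (the pod name is taken from the log above, the kubeconfig path is an assumption):

```go
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Build a client from the default kubeconfig (~/.kube/config).
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)

	// List the Pulling/Failed/BackOff events recorded for one of the
	// pods seen in this log.
	events, err := cs.CoreV1().Events("openstack-operators").List(context.TODO(),
		metav1.ListOptions{
			FieldSelector: "involvedObject.name=glance-operator-controller-manager-668d9c48b9-fkvck",
		})
	if err != nil {
		panic(err)
	}
	for _, e := range events.Items {
		fmt.Printf("%s\t%s\t%s\n", e.LastTimestamp.Time, e.Reason, e.Message)
	}
}
```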
Dec 01 07:08:06 crc kubenswrapper[4822]: I1201 07:08:06.980444 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5"]
Dec 01 07:08:07 crc kubenswrapper[4822]: I1201 07:08:07.021969 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll"]
Dec 01 07:08:07 crc kubenswrapper[4822]: W1201 07:08:07.084449 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8f4cc9b_6768_4f0c_a82e_d4a831291fee.slice/crio-e28e3452de003f9a105c24690840e3cf4a592af13ac494f373388d5eb29c1e9d WatchSource:0}: Error finding container e28e3452de003f9a105c24690840e3cf4a592af13ac494f373388d5eb29c1e9d: Status 404 returned error can't find the container with id e28e3452de003f9a105c24690840e3cf4a592af13ac494f373388d5eb29c1e9d
Dec 01 07:08:07 crc kubenswrapper[4822]: W1201 07:08:07.086744 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f7f1540_19b0_48c0_adab_c10e8bdd0fd3.slice/crio-13182965921d2a976b4ea468553c97a010a5f1c936f2697f8b9137113dffd82a WatchSource:0}: Error finding container 13182965921d2a976b4ea468553c97a010a5f1c936f2697f8b9137113dffd82a: Status 404 returned error can't find the container with id 13182965921d2a976b4ea468553c97a010a5f1c936f2697f8b9137113dffd82a
Dec 01 07:08:07 crc kubenswrapper[4822]: I1201 07:08:07.359878 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" event={"ID":"f8f4cc9b-6768-4f0c-a82e-d4a831291fee","Type":"ContainerStarted","Data":"e28e3452de003f9a105c24690840e3cf4a592af13ac494f373388d5eb29c1e9d"}
Dec 01 07:08:07 crc kubenswrapper[4822]: I1201 07:08:07.362088 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk" event={"ID":"0778e4fe-6293-4282-9978-a2838469affe","Type":"ContainerStarted","Data":"da27982f2e0fceb384c4683807a8060f3946aaf6893b6f990e0adb711bc9c4e2"}
Dec 01 07:08:07 crc kubenswrapper[4822]: I1201 07:08:07.364166 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" event={"ID":"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3","Type":"ContainerStarted","Data":"13182965921d2a976b4ea468553c97a010a5f1c936f2697f8b9137113dffd82a"}
Dec 01 07:08:08 crc kubenswrapper[4822]: I1201 07:08:08.484541 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff" event={"ID":"fd0abaec-7f45-438f-843e-1a1dd2cbf841","Type":"ContainerStarted","Data":"829bb3a34a87cac151c50c3dbdd127430d40eb0c5d19aa500e08161b9bcbc48a"}
Dec 01 07:08:08 crc kubenswrapper[4822]: I1201 07:08:08.486887 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v" event={"ID":"43d47ccc-afef-42e7-bc18-df3be5e2b4e0","Type":"ContainerStarted","Data":"16e1c40f46ecaaecf10f229c175c9350a986f9c3bdece7745e8b044b16129c82"}
Dec 01 07:08:08 crc kubenswrapper[4822]: I1201 07:08:08.488752 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr" event={"ID":"1e38b2a2-19d5-4375-8c74-c46ca488a520","Type":"ContainerStarted","Data":"dfeac0bf3cfa069d81ed77263db9a94b7a732470bb2d7402fa404f5b5e463317"}
Dec 01 07:08:08 crc kubenswrapper[4822]: I1201 07:08:08.490091 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss" event={"ID":"adf58c74-4460-490a-97bb-a2d60a6efffa","Type":"ContainerStarted","Data":"da968560549b73d43f9c917d6b87551e9711bf5da3c74c4c2f92182653990532"}
Dec 01 07:08:08 crc kubenswrapper[4822]: I1201 07:08:08.491435 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" event={"ID":"b6e7914c-28b1-4241-9db9-ebecda9ede7a","Type":"ContainerStarted","Data":"69a325e0e09a0cd3c5c9e2d2a2d0193ed7c706561021bdda18bb576940115649"}
Dec 01 07:08:08 crc kubenswrapper[4822]: I1201 07:08:08.492607 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6" event={"ID":"ee5199ce-b0f1-4753-a317-8d4b95bca11b","Type":"ContainerStarted","Data":"f6ca4d40a1ae7fe2e70afbfe4fa544d74bdbb509eab312f36e8fbc5cbf2545ea"}
Dec 01 07:08:08 crc kubenswrapper[4822]: I1201 07:08:08.493951 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s" event={"ID":"157e4e2d-79c0-42c6-9dd5-a2669945d731","Type":"ContainerStarted","Data":"c5a1155a610e74920edcd7f21a1072cf49cd0dd0f32182ac2abd6ac07f0c5a5a"}
Dec 01 07:08:08 crc kubenswrapper[4822]: I1201 07:08:08.495222 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z" event={"ID":"38214819-c905-4c30-8c6c-e8ea8978656f","Type":"ContainerStarted","Data":"ec42060ebabb164424dfc00fac493c2f7d5a10ad166f176c4e909551943257a1"}
Dec 01 07:08:08 crc kubenswrapper[4822]: I1201 07:08:08.509375 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5" event={"ID":"7fd09048-e506-48e1-9d30-01cbd6117fcc","Type":"ContainerStarted","Data":"9e8bcf56d4b3412adea2facc08b78d3021efe892346e8628e7a7aa6f1a5f6d5a"}
Dec 01 07:08:10 crc kubenswrapper[4822]: I1201 07:08:10.314483 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:08:10 crc kubenswrapper[4822]: I1201 07:08:10.353893 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d169afb8-577e-430e-84b1-98f34bdbec2c-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-kmpbd\" (UID: \"d169afb8-577e-430e-84b1-98f34bdbec2c\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:08:10 crc kubenswrapper[4822]: I1201 07:08:10.437170 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-7kktl"
Dec 01 07:08:10 crc kubenswrapper[4822]: I1201 07:08:10.445682 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"
Dec 01 07:08:10 crc kubenswrapper[4822]: I1201 07:08:10.571884 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wllwr" podStartSLOduration=7.956328963 podStartE2EDuration="32.571866224s" podCreationTimestamp="2025-12-01 07:07:38 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.663799433 +0000 UTC m=+1015.984607119" lastFinishedPulling="2025-12-01 07:08:05.279336694 +0000 UTC m=+1040.600144380" observedRunningTime="2025-12-01 07:08:10.570718342 +0000 UTC m=+1045.891526028" watchObservedRunningTime="2025-12-01 07:08:10.571866224 +0000 UTC m=+1045.892673900"
Dec 01 07:08:17 crc kubenswrapper[4822]: I1201 07:08:17.864070 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd"]
Dec 01 07:08:17 crc kubenswrapper[4822]: W1201 07:08:17.875077 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd169afb8_577e_430e_84b1_98f34bdbec2c.slice/crio-41704862ae8edba401f42975eb0536e8aa8750784fb93a95557775daf7547f20 WatchSource:0}: Error finding container 41704862ae8edba401f42975eb0536e8aa8750784fb93a95557775daf7547f20: Status 404 returned error can't find the container with id 41704862ae8edba401f42975eb0536e8aa8750784fb93a95557775daf7547f20
Dec 01 07:08:18 crc kubenswrapper[4822]: I1201 07:08:18.624898 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd" event={"ID":"d169afb8-577e-430e-84b1-98f34bdbec2c","Type":"ContainerStarted","Data":"41704862ae8edba401f42975eb0536e8aa8750784fb93a95557775daf7547f20"}
Dec 01 07:08:18 crc kubenswrapper[4822]: I1201 07:08:18.630003 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" event={"ID":"4a0f6236-18f0-436a-9544-d76b8d1c3a09","Type":"ContainerStarted","Data":"9a163880b3d624525a6d8214776e63d090b5d884f813ff435143350a2c9067f0"}
Dec 01 07:08:18 crc kubenswrapper[4822]: E1201 07:08:18.687773 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 01 07:08:18 crc kubenswrapper[4822]: E1201 07:08:18.687957 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cr6xb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-6shpm_openstack-operators(825c452f-e271-42a4-ba90-d16f50140303): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
&Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cr6xb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-6shpm_openstack-operators(825c452f-e271-42a4-ba90-d16f50140303): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:08:18 crc kubenswrapper[4822]: E1201 07:08:18.689598 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-6shpm" podUID="825c452f-e271-42a4-ba90-d16f50140303" Dec 01 07:08:19 crc kubenswrapper[4822]: E1201 07:08:19.046029 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b" podUID="3ebc517a-dfe9-4462-b92d-b381d254f028" Dec 01 07:08:19 crc kubenswrapper[4822]: E1201 07:08:19.191338 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 01 07:08:19 crc kubenswrapper[4822]: E1201 07:08:19.191479 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hh5xt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-dmqx6_openstack-operators(44bcc489-7661-42ff-b164-4bc2fea1a426): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:08:19 crc kubenswrapper[4822]: E1201 07:08:19.192691 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6" podUID="44bcc489-7661-42ff-b164-4bc2fea1a426" Dec 01 07:08:19 crc kubenswrapper[4822]: E1201 07:08:19.627111 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 01 07:08:19 crc kubenswrapper[4822]: E1201 07:08:19.627393 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-swmzk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-6546668bfd-vlj27_openstack-operators(af698804-e3b6-481d-bd2f-34350bae1b8f): ErrImagePull: 
rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:08:19 crc kubenswrapper[4822]: E1201 07:08:19.628631 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27" podUID="af698804-e3b6-481d-bd2f-34350bae1b8f" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.669281 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" event={"ID":"4a0f6236-18f0-436a-9544-d76b8d1c3a09","Type":"ContainerStarted","Data":"3a508a152ef15de1e472abb2ec8ffa22546e10f9d104bb52ac3aea7ec579bc81"} Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.670565 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.685141 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z" event={"ID":"38214819-c905-4c30-8c6c-e8ea8978656f","Type":"ContainerStarted","Data":"2c9fb1841eb1a869a06c927789f91d005263adbd19228eba33b7798976caa928"} Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.693105 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.695755 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.699543 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" podStartSLOduration=4.231906891 podStartE2EDuration="42.69952455s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.641031135 +0000 UTC m=+1015.961838821" lastFinishedPulling="2025-12-01 07:08:19.108648794 +0000 UTC m=+1054.429456480" observedRunningTime="2025-12-01 07:08:19.694491059 +0000 UTC m=+1055.015298755" watchObservedRunningTime="2025-12-01 07:08:19.69952455 +0000 UTC m=+1055.020332306" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.715470 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk" event={"ID":"0778e4fe-6293-4282-9978-a2838469affe","Type":"ContainerStarted","Data":"a54f66d3571f552541f84a48606a1be044ae92ae52a53809ad2457c363769fed"} Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.715999 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.720029 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.720899 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/placement-operator-controller-manager-78f8948974-6wz2z" podStartSLOduration=4.487941095 podStartE2EDuration="42.72088327s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.582679369 +0000 UTC m=+1015.903487055" lastFinishedPulling="2025-12-01 07:08:18.815621524 +0000 UTC m=+1054.136429230" observedRunningTime="2025-12-01 07:08:19.713725279 +0000 UTC m=+1055.034532965" watchObservedRunningTime="2025-12-01 07:08:19.72088327 +0000 UTC m=+1055.041690956" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.721349 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss" event={"ID":"adf58c74-4460-490a-97bb-a2d60a6efffa","Type":"ContainerStarted","Data":"b7a4f763e3f7c31a89a3bd217f6d819353e7e93705b74d9c2688567048630f2b"} Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.726341 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.726825 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.731739 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6" event={"ID":"ee5199ce-b0f1-4753-a317-8d4b95bca11b","Type":"ContainerStarted","Data":"661223ec5f7b27e235af95168a566d08f93227b5ffcdd9e301c28011520225d0"} Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.735389 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.736976 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.741523 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s" event={"ID":"157e4e2d-79c0-42c6-9dd5-a2669945d731","Type":"ContainerStarted","Data":"a596e48370f7b12e820ceca4829fa5165a2c51856e3fc7ed82eed3332b82eae8"} Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.743729 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.751872 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.753071 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" event={"ID":"2ed3d718-e591-45b9-9ae7-f6ed765afa35","Type":"ContainerStarted","Data":"5959ea453639c335e25b8a46e3df31f982390b0b8afae8e49d4320df90d8ca92"} Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.754402 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" 
event={"ID":"ea009d0a-d9c2-4265-96bf-6153ce222eef","Type":"ContainerStarted","Data":"56c72539232ea86343b0f9d956a2b0cf1277dba5a16b01f6651a1c0582790f63"} Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.766012 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd" event={"ID":"d169afb8-577e-430e-84b1-98f34bdbec2c","Type":"ContainerStarted","Data":"fe8641260ee6716bae6e9441c0fef496c1eafb1d571cbc8f7716f34d8b7c4edd"} Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.767166 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.773839 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-4xwss" podStartSLOduration=3.531840646 podStartE2EDuration="42.773814767s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:39.879195425 +0000 UTC m=+1015.200003111" lastFinishedPulling="2025-12-01 07:08:19.121169546 +0000 UTC m=+1054.441977232" observedRunningTime="2025-12-01 07:08:19.766770179 +0000 UTC m=+1055.087577865" watchObservedRunningTime="2025-12-01 07:08:19.773814767 +0000 UTC m=+1055.094622453" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.780634 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b" event={"ID":"3ebc517a-dfe9-4462-b92d-b381d254f028","Type":"ContainerStarted","Data":"f302668e695b2ff3895020b2d02bc61ecf61b278484180256e374e7be87ab248"} Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.812886 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-k72b6" podStartSLOduration=3.870382063 podStartE2EDuration="42.812865933s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.09659557 +0000 UTC m=+1015.417403256" lastFinishedPulling="2025-12-01 07:08:19.03907944 +0000 UTC m=+1054.359887126" observedRunningTime="2025-12-01 07:08:19.799393185 +0000 UTC m=+1055.120200871" watchObservedRunningTime="2025-12-01 07:08:19.812865933 +0000 UTC m=+1055.133673619" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.832340 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-8pmrk" podStartSLOduration=4.454747685 podStartE2EDuration="42.83232285s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.52780032 +0000 UTC m=+1015.848608006" lastFinishedPulling="2025-12-01 07:08:18.905375485 +0000 UTC m=+1054.226183171" observedRunningTime="2025-12-01 07:08:19.83197839 +0000 UTC m=+1055.152786076" watchObservedRunningTime="2025-12-01 07:08:19.83232285 +0000 UTC m=+1055.153130536" Dec 01 07:08:19 crc kubenswrapper[4822]: I1201 07:08:19.931390 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-zgl8s" podStartSLOduration=4.859007064 podStartE2EDuration="42.931374522s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.657037124 +0000 UTC m=+1015.977844810" lastFinishedPulling="2025-12-01 07:08:18.729404562 +0000 UTC m=+1054.050212268" 
observedRunningTime="2025-12-01 07:08:19.930846197 +0000 UTC m=+1055.251653883" watchObservedRunningTime="2025-12-01 07:08:19.931374522 +0000 UTC m=+1055.252182208" Dec 01 07:08:20 crc kubenswrapper[4822]: I1201 07:08:20.020923 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd" podStartSLOduration=42.020906387 podStartE2EDuration="42.020906387s" podCreationTimestamp="2025-12-01 07:07:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:08:20.0160586 +0000 UTC m=+1055.336866286" watchObservedRunningTime="2025-12-01 07:08:20.020906387 +0000 UTC m=+1055.341714073" Dec 01 07:08:20 crc kubenswrapper[4822]: I1201 07:08:20.795976 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" event={"ID":"2ed3d718-e591-45b9-9ae7-f6ed765afa35","Type":"ContainerStarted","Data":"19e537fa06f3d089c6a7ff16501452ebc6eeeb5d9232567db4c1d83c7a2d131a"} Dec 01 07:08:20 crc kubenswrapper[4822]: I1201 07:08:20.814017 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" podStartSLOduration=5.233637307 podStartE2EDuration="43.813986961s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.608329858 +0000 UTC m=+1015.929137544" lastFinishedPulling="2025-12-01 07:08:19.188679512 +0000 UTC m=+1054.509487198" observedRunningTime="2025-12-01 07:08:20.811898372 +0000 UTC m=+1056.132706068" watchObservedRunningTime="2025-12-01 07:08:20.813986961 +0000 UTC m=+1056.134794647" Dec 01 07:08:21 crc kubenswrapper[4822]: I1201 07:08:21.809931 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" event={"ID":"b6e7914c-28b1-4241-9db9-ebecda9ede7a","Type":"ContainerStarted","Data":"9dee84c6dd23d675aee2cf74e1b51ed3ae8d5d5422b846faf4da4affc8e1f330"} Dec 01 07:08:21 crc kubenswrapper[4822]: I1201 07:08:21.810383 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" Dec 01 07:08:21 crc kubenswrapper[4822]: I1201 07:08:21.811488 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" event={"ID":"ea009d0a-d9c2-4265-96bf-6153ce222eef","Type":"ContainerStarted","Data":"02b4c7edfd25fa3f14999f5ff040978b6c17e6e7402f51865afb9f3af84dfd79"} Dec 01 07:08:21 crc kubenswrapper[4822]: I1201 07:08:21.813770 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" Dec 01 07:08:21 crc kubenswrapper[4822]: I1201 07:08:21.813815 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" Dec 01 07:08:21 crc kubenswrapper[4822]: I1201 07:08:21.832341 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-t2t9n" podStartSLOduration=6.187799988 podStartE2EDuration="44.832310233s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.601371773 +0000 UTC m=+1015.922179459" 
lastFinishedPulling="2025-12-01 07:08:19.245882018 +0000 UTC m=+1054.566689704" observedRunningTime="2025-12-01 07:08:21.825892603 +0000 UTC m=+1057.146700289" watchObservedRunningTime="2025-12-01 07:08:21.832310233 +0000 UTC m=+1057.153117919" Dec 01 07:08:21 crc kubenswrapper[4822]: I1201 07:08:21.907247 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" podStartSLOduration=6.33537038 podStartE2EDuration="44.907221797s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.640896221 +0000 UTC m=+1015.961703907" lastFinishedPulling="2025-12-01 07:08:19.212747638 +0000 UTC m=+1054.533555324" observedRunningTime="2025-12-01 07:08:21.9009189 +0000 UTC m=+1057.221726616" watchObservedRunningTime="2025-12-01 07:08:21.907221797 +0000 UTC m=+1057.228029483" Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.837834 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b" event={"ID":"3ebc517a-dfe9-4462-b92d-b381d254f028","Type":"ContainerStarted","Data":"d1f0d35803f44e64628bb36693de58e1c194897867ed6e0a5645c626b7ec4d6e"} Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.838396 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b" Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.848981 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" event={"ID":"f8f4cc9b-6768-4f0c-a82e-d4a831291fee","Type":"ContainerStarted","Data":"a5422415b59464c0af21ebe2059f2256e169230ab7a92d0bc28e37d45d324b8e"} Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.852879 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-6shpm" event={"ID":"825c452f-e271-42a4-ba90-d16f50140303","Type":"ContainerStarted","Data":"f6da5ecf5401d229a1e708861b3ac714627fe91ff8a09b0a4de81c5fd4eb14fd"} Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.861379 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b" podStartSLOduration=4.018992743 podStartE2EDuration="45.861363806s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.599802679 +0000 UTC m=+1015.920610365" lastFinishedPulling="2025-12-01 07:08:22.442173732 +0000 UTC m=+1057.762981428" observedRunningTime="2025-12-01 07:08:22.860632845 +0000 UTC m=+1058.181440531" watchObservedRunningTime="2025-12-01 07:08:22.861363806 +0000 UTC m=+1058.182171492" Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.864828 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff" event={"ID":"fd0abaec-7f45-438f-843e-1a1dd2cbf841","Type":"ContainerStarted","Data":"074ebbcea8deb801bd3aa7e9b34b165bdbbf9915f04fb6887aec2f1d386bf210"} Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.865746 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff" Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.870146 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v" event={"ID":"43d47ccc-afef-42e7-bc18-df3be5e2b4e0","Type":"ContainerStarted","Data":"7d199e259778f57e6d8ab911b8a5a6ea35309b22a0bf831346e608563ad73a97"} Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.871019 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.871046 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v" Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.878449 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff" Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.880808 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v" Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.887709 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f4bff" podStartSLOduration=6.332176218 podStartE2EDuration="45.887690165s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:39.868056802 +0000 UTC m=+1015.188864488" lastFinishedPulling="2025-12-01 07:08:19.423570749 +0000 UTC m=+1054.744378435" observedRunningTime="2025-12-01 07:08:22.882927202 +0000 UTC m=+1058.203734888" watchObservedRunningTime="2025-12-01 07:08:22.887690165 +0000 UTC m=+1058.208497851" Dec 01 07:08:22 crc kubenswrapper[4822]: I1201 07:08:22.907133 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pbb9v" podStartSLOduration=2.8229281520000002 podStartE2EDuration="45.90709664s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:39.302475824 +0000 UTC m=+1014.623283510" lastFinishedPulling="2025-12-01 07:08:22.386644312 +0000 UTC m=+1057.707451998" observedRunningTime="2025-12-01 07:08:22.906189065 +0000 UTC m=+1058.226996751" watchObservedRunningTime="2025-12-01 07:08:22.90709664 +0000 UTC m=+1058.227904326" Dec 01 07:08:22 crc kubenswrapper[4822]: E1201 07:08:22.988632 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck" podUID="031f17da-46a5-4904-b32c-968dbd5959c1" Dec 01 07:08:23 crc kubenswrapper[4822]: E1201 07:08:23.062914 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mtshp" podUID="d94674cd-a8c3-4db7-acb1-2a9965fd85e0" Dec 01 07:08:23 crc kubenswrapper[4822]: E1201 07:08:23.089185 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4" podUID="85f27b60-f694-4768-b55e-bb816ed4594b" 
Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.878320 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27" event={"ID":"af698804-e3b6-481d-bd2f-34350bae1b8f","Type":"ContainerStarted","Data":"06a2918523342dc4e5256124f408697eb8cc37c0d878d3a0b7b05af22a353af3"} Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.878383 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27" event={"ID":"af698804-e3b6-481d-bd2f-34350bae1b8f","Type":"ContainerStarted","Data":"2c96cef14aeec5be7ea995c794592f051749e852383ce3b0c56f8e495785f5f4"} Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.880182 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27" Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.883059 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4" event={"ID":"85f27b60-f694-4768-b55e-bb816ed4594b","Type":"ContainerStarted","Data":"fb3ad54ad2f8aa8fe38d72e77825c6c4b80b93ed26994bd5d5694f9faf29a592"} Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.888940 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" event={"ID":"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3","Type":"ContainerStarted","Data":"02c9c0d022260d9d3ad22aea7b6af817ccce3a9f0b1f05988be07ff24d63286e"} Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.888996 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" event={"ID":"9f7f1540-19b0-48c0-adab-c10e8bdd0fd3","Type":"ContainerStarted","Data":"463f86e19f765e6170c2a3ecfeb63c8ae71ff1792c2306173350dcea6fded6fc"} Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.889939 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.893725 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6" event={"ID":"44bcc489-7661-42ff-b164-4bc2fea1a426","Type":"ContainerStarted","Data":"1c77f4d2bb93fed3a1b019ae67bd241c68eae7dc17c45a3ac46fda13b47f762c"} Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.893778 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6" event={"ID":"44bcc489-7661-42ff-b164-4bc2fea1a426","Type":"ContainerStarted","Data":"681567d208e9ca9fbcf03c5fa8a3d623584192c84571333c387a4332862dae9d"} Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.894578 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6" Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.895827 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5" event={"ID":"7fd09048-e506-48e1-9d30-01cbd6117fcc","Type":"ContainerStarted","Data":"6263504bb4b693c88e9eaf4fc47424a1e8f253f1935e76091b74afc636bf8cf7"} Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.897397 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5" Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.898289 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5" Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.898598 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mtshp" event={"ID":"d94674cd-a8c3-4db7-acb1-2a9965fd85e0","Type":"ContainerStarted","Data":"df6e7d25d3ec53dc2a2888fe70e96ea315835008a337bc3e33ffecffb3ad38c1"} Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.902063 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck" event={"ID":"031f17da-46a5-4904-b32c-968dbd5959c1","Type":"ContainerStarted","Data":"2e0926dba70a1b6895ffa91ddc1f5e062a29f6b990af68d483ebbe2f0e045502"} Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.905765 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" event={"ID":"f8f4cc9b-6768-4f0c-a82e-d4a831291fee","Type":"ContainerStarted","Data":"2b1ffa615f79dc7a4289946ff986ed70dd07e25c910cedf753044c0a347dcf46"} Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.908078 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.914442 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-6shpm" event={"ID":"825c452f-e271-42a4-ba90-d16f50140303","Type":"ContainerStarted","Data":"959eddca0a44589043742c59f7e865e8a56e2883ad64e64d6848adb86ddcde2f"} Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.916265 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-6shpm" Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.917096 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-m8rnk" Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.940759 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27" podStartSLOduration=4.639364329 podStartE2EDuration="46.940743512s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.1515201 +0000 UTC m=+1015.472327786" lastFinishedPulling="2025-12-01 07:08:22.452899253 +0000 UTC m=+1057.773706969" observedRunningTime="2025-12-01 07:08:23.908221568 +0000 UTC m=+1059.229029244" watchObservedRunningTime="2025-12-01 07:08:23.940743512 +0000 UTC m=+1059.261551198" Dec 01 07:08:23 crc kubenswrapper[4822]: I1201 07:08:23.941568 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" podStartSLOduration=31.58787079 podStartE2EDuration="46.941564185s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:08:07.089097964 +0000 UTC m=+1042.409905650" lastFinishedPulling="2025-12-01 07:08:22.442791309 +0000 UTC m=+1057.763599045" observedRunningTime="2025-12-01 
07:08:23.936388299 +0000 UTC m=+1059.257195985" watchObservedRunningTime="2025-12-01 07:08:23.941564185 +0000 UTC m=+1059.262371871" Dec 01 07:08:24 crc kubenswrapper[4822]: I1201 07:08:24.121989 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mbsh5" podStartSLOduration=4.807578343 podStartE2EDuration="47.121971502s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.14260716 +0000 UTC m=+1015.463414846" lastFinishedPulling="2025-12-01 07:08:22.457000309 +0000 UTC m=+1057.777808005" observedRunningTime="2025-12-01 07:08:24.072298287 +0000 UTC m=+1059.393105983" watchObservedRunningTime="2025-12-01 07:08:24.121971502 +0000 UTC m=+1059.442779188" Dec 01 07:08:24 crc kubenswrapper[4822]: I1201 07:08:24.162703 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6" podStartSLOduration=5.300888238 podStartE2EDuration="47.162682895s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.580959551 +0000 UTC m=+1015.901767227" lastFinishedPulling="2025-12-01 07:08:22.442754188 +0000 UTC m=+1057.763561884" observedRunningTime="2025-12-01 07:08:24.113417441 +0000 UTC m=+1059.434225137" watchObservedRunningTime="2025-12-01 07:08:24.162682895 +0000 UTC m=+1059.483490601" Dec 01 07:08:24 crc kubenswrapper[4822]: I1201 07:08:24.295007 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-6shpm" podStartSLOduration=5.480248074 podStartE2EDuration="47.294975211s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.600810987 +0000 UTC m=+1015.921618673" lastFinishedPulling="2025-12-01 07:08:22.415538124 +0000 UTC m=+1057.736345810" observedRunningTime="2025-12-01 07:08:24.286903824 +0000 UTC m=+1059.607711530" watchObservedRunningTime="2025-12-01 07:08:24.294975211 +0000 UTC m=+1059.615782897" Dec 01 07:08:24 crc kubenswrapper[4822]: I1201 07:08:24.307780 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" podStartSLOduration=32.008269927 podStartE2EDuration="47.30775351s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:08:07.116511534 +0000 UTC m=+1042.437319220" lastFinishedPulling="2025-12-01 07:08:22.415995107 +0000 UTC m=+1057.736802803" observedRunningTime="2025-12-01 07:08:24.264468444 +0000 UTC m=+1059.585276150" watchObservedRunningTime="2025-12-01 07:08:24.30775351 +0000 UTC m=+1059.628561196" Dec 01 07:08:24 crc kubenswrapper[4822]: I1201 07:08:24.920824 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mtshp" event={"ID":"d94674cd-a8c3-4db7-acb1-2a9965fd85e0","Type":"ContainerStarted","Data":"dab4b73a14386aa3d2ee1285bbd5a066599a39d03336cc3e80ccf8064c0e4fb7"} Dec 01 07:08:24 crc kubenswrapper[4822]: I1201 07:08:24.920938 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mtshp" Dec 01 07:08:24 crc kubenswrapper[4822]: I1201 07:08:24.922621 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4" 
event={"ID":"85f27b60-f694-4768-b55e-bb816ed4594b","Type":"ContainerStarted","Data":"244cab578ba51fcafc231aa6948b141fe00609b09397a9a17c93e6b867e7f45a"} Dec 01 07:08:24 crc kubenswrapper[4822]: I1201 07:08:24.922830 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4" Dec 01 07:08:24 crc kubenswrapper[4822]: I1201 07:08:24.924761 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck" event={"ID":"031f17da-46a5-4904-b32c-968dbd5959c1","Type":"ContainerStarted","Data":"e020e86c1b1b35718c07287be3002e7fe42ae72b7bb8d9677c48397fc1095ade"} Dec 01 07:08:24 crc kubenswrapper[4822]: I1201 07:08:24.943973 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mtshp" podStartSLOduration=3.970324108 podStartE2EDuration="47.943948739s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.567885474 +0000 UTC m=+1015.888693160" lastFinishedPulling="2025-12-01 07:08:24.541510105 +0000 UTC m=+1059.862317791" observedRunningTime="2025-12-01 07:08:24.940618495 +0000 UTC m=+1060.261426181" watchObservedRunningTime="2025-12-01 07:08:24.943948739 +0000 UTC m=+1060.264756435" Dec 01 07:08:24 crc kubenswrapper[4822]: I1201 07:08:24.963153 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck" podStartSLOduration=3.54954498 podStartE2EDuration="47.963136317s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.129166333 +0000 UTC m=+1015.449974019" lastFinishedPulling="2025-12-01 07:08:24.54275767 +0000 UTC m=+1059.863565356" observedRunningTime="2025-12-01 07:08:24.961484291 +0000 UTC m=+1060.282291977" watchObservedRunningTime="2025-12-01 07:08:24.963136317 +0000 UTC m=+1060.283944003" Dec 01 07:08:25 crc kubenswrapper[4822]: I1201 07:08:25.934303 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck" Dec 01 07:08:28 crc kubenswrapper[4822]: I1201 07:08:28.162007 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-vlj27" Dec 01 07:08:28 crc kubenswrapper[4822]: I1201 07:08:28.179060 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-2vpl9" Dec 01 07:08:28 crc kubenswrapper[4822]: I1201 07:08:28.184948 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4" podStartSLOduration=6.727356674 podStartE2EDuration="51.184926207s" podCreationTimestamp="2025-12-01 07:07:37 +0000 UTC" firstStartedPulling="2025-12-01 07:07:40.160391159 +0000 UTC m=+1015.481198845" lastFinishedPulling="2025-12-01 07:08:24.617960692 +0000 UTC m=+1059.938768378" observedRunningTime="2025-12-01 07:08:25.134671375 +0000 UTC m=+1060.455479061" watchObservedRunningTime="2025-12-01 07:08:28.184926207 +0000 UTC m=+1063.505733933" Dec 01 07:08:28 crc kubenswrapper[4822]: I1201 07:08:28.272319 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-49rk7" Dec 01 07:08:28 
crc kubenswrapper[4822]: I1201 07:08:28.513306 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-6shpm" Dec 01 07:08:28 crc kubenswrapper[4822]: I1201 07:08:28.581024 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dmqx6" Dec 01 07:08:28 crc kubenswrapper[4822]: I1201 07:08:28.723724 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-wxp4b" Dec 01 07:08:30 crc kubenswrapper[4822]: I1201 07:08:30.455806 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-kmpbd" Dec 01 07:08:33 crc kubenswrapper[4822]: I1201 07:08:33.771179 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2s7q5" Dec 01 07:08:34 crc kubenswrapper[4822]: I1201 07:08:34.153735 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb4462f7ll" Dec 01 07:08:37 crc kubenswrapper[4822]: I1201 07:08:37.782670 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-fkvck" Dec 01 07:08:37 crc kubenswrapper[4822]: I1201 07:08:37.834959 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-275w4" Dec 01 07:08:38 crc kubenswrapper[4822]: I1201 07:08:38.491505 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mtshp" Dec 01 07:08:42 crc kubenswrapper[4822]: I1201 07:08:42.615383 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:08:42 crc kubenswrapper[4822]: I1201 07:08:42.615958 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:08:53 crc kubenswrapper[4822]: I1201 07:08:53.997794 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-557f57d995-55vxk"] Dec 01 07:08:53 crc kubenswrapper[4822]: I1201 07:08:53.999411 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-55vxk" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.003823 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.003883 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.004262 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.004414 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-qwq6w" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.016092 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-55vxk"] Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.044295 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f03b65d-f2d8-4ac3-ac9b-932418566153-config\") pod \"dnsmasq-dns-557f57d995-55vxk\" (UID: \"2f03b65d-f2d8-4ac3-ac9b-932418566153\") " pod="openstack/dnsmasq-dns-557f57d995-55vxk" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.044439 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqr2c\" (UniqueName: \"kubernetes.io/projected/2f03b65d-f2d8-4ac3-ac9b-932418566153-kube-api-access-cqr2c\") pod \"dnsmasq-dns-557f57d995-55vxk\" (UID: \"2f03b65d-f2d8-4ac3-ac9b-932418566153\") " pod="openstack/dnsmasq-dns-557f57d995-55vxk" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.079051 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-v98cn"] Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.081575 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.083724 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.102221 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-v98cn"] Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.145542 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqr2c\" (UniqueName: \"kubernetes.io/projected/2f03b65d-f2d8-4ac3-ac9b-932418566153-kube-api-access-cqr2c\") pod \"dnsmasq-dns-557f57d995-55vxk\" (UID: \"2f03b65d-f2d8-4ac3-ac9b-932418566153\") " pod="openstack/dnsmasq-dns-557f57d995-55vxk" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.145653 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40f5477c-c24a-45f7-ad84-48f06a34304c-config\") pod \"dnsmasq-dns-766fdc659c-v98cn\" (UID: \"40f5477c-c24a-45f7-ad84-48f06a34304c\") " pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.145676 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40f5477c-c24a-45f7-ad84-48f06a34304c-dns-svc\") pod \"dnsmasq-dns-766fdc659c-v98cn\" (UID: \"40f5477c-c24a-45f7-ad84-48f06a34304c\") " pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.145721 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f03b65d-f2d8-4ac3-ac9b-932418566153-config\") pod \"dnsmasq-dns-557f57d995-55vxk\" (UID: \"2f03b65d-f2d8-4ac3-ac9b-932418566153\") " pod="openstack/dnsmasq-dns-557f57d995-55vxk" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.145747 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdgk4\" (UniqueName: \"kubernetes.io/projected/40f5477c-c24a-45f7-ad84-48f06a34304c-kube-api-access-bdgk4\") pod \"dnsmasq-dns-766fdc659c-v98cn\" (UID: \"40f5477c-c24a-45f7-ad84-48f06a34304c\") " pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.147153 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f03b65d-f2d8-4ac3-ac9b-932418566153-config\") pod \"dnsmasq-dns-557f57d995-55vxk\" (UID: \"2f03b65d-f2d8-4ac3-ac9b-932418566153\") " pod="openstack/dnsmasq-dns-557f57d995-55vxk" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.168290 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqr2c\" (UniqueName: \"kubernetes.io/projected/2f03b65d-f2d8-4ac3-ac9b-932418566153-kube-api-access-cqr2c\") pod \"dnsmasq-dns-557f57d995-55vxk\" (UID: \"2f03b65d-f2d8-4ac3-ac9b-932418566153\") " pod="openstack/dnsmasq-dns-557f57d995-55vxk" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.246193 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40f5477c-c24a-45f7-ad84-48f06a34304c-config\") pod \"dnsmasq-dns-766fdc659c-v98cn\" (UID: \"40f5477c-c24a-45f7-ad84-48f06a34304c\") " pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 
07:08:54.246244 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40f5477c-c24a-45f7-ad84-48f06a34304c-dns-svc\") pod \"dnsmasq-dns-766fdc659c-v98cn\" (UID: \"40f5477c-c24a-45f7-ad84-48f06a34304c\") " pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.246283 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdgk4\" (UniqueName: \"kubernetes.io/projected/40f5477c-c24a-45f7-ad84-48f06a34304c-kube-api-access-bdgk4\") pod \"dnsmasq-dns-766fdc659c-v98cn\" (UID: \"40f5477c-c24a-45f7-ad84-48f06a34304c\") " pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.247657 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40f5477c-c24a-45f7-ad84-48f06a34304c-config\") pod \"dnsmasq-dns-766fdc659c-v98cn\" (UID: \"40f5477c-c24a-45f7-ad84-48f06a34304c\") " pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.247657 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40f5477c-c24a-45f7-ad84-48f06a34304c-dns-svc\") pod \"dnsmasq-dns-766fdc659c-v98cn\" (UID: \"40f5477c-c24a-45f7-ad84-48f06a34304c\") " pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.265702 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdgk4\" (UniqueName: \"kubernetes.io/projected/40f5477c-c24a-45f7-ad84-48f06a34304c-kube-api-access-bdgk4\") pod \"dnsmasq-dns-766fdc659c-v98cn\" (UID: \"40f5477c-c24a-45f7-ad84-48f06a34304c\") " pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.326409 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-55vxk" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.404834 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.941633 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-55vxk"] Dec 01 07:08:54 crc kubenswrapper[4822]: I1201 07:08:54.948775 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:08:55 crc kubenswrapper[4822]: I1201 07:08:55.041289 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-v98cn"] Dec 01 07:08:55 crc kubenswrapper[4822]: W1201 07:08:55.045179 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40f5477c_c24a_45f7_ad84_48f06a34304c.slice/crio-b5051da51c91c2e484aebed92226a4107707da61977fbc45cd9c1ecb5e832c70 WatchSource:0}: Error finding container b5051da51c91c2e484aebed92226a4107707da61977fbc45cd9c1ecb5e832c70: Status 404 returned error can't find the container with id b5051da51c91c2e484aebed92226a4107707da61977fbc45cd9c1ecb5e832c70 Dec 01 07:08:55 crc kubenswrapper[4822]: I1201 07:08:55.218234 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557f57d995-55vxk" event={"ID":"2f03b65d-f2d8-4ac3-ac9b-932418566153","Type":"ContainerStarted","Data":"d87bf5248996d4784c34707f6a05de7921a2c07737426d6477e7e380b56820a1"} Dec 01 07:08:55 crc kubenswrapper[4822]: I1201 07:08:55.219912 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-766fdc659c-v98cn" event={"ID":"40f5477c-c24a-45f7-ad84-48f06a34304c","Type":"ContainerStarted","Data":"b5051da51c91c2e484aebed92226a4107707da61977fbc45cd9c1ecb5e832c70"} Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.396212 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-55vxk"] Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.438333 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57dc4c6697-rjc4t"] Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.440504 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.450404 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57dc4c6697-rjc4t"]
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.536944 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf39297e-a183-46ed-9b65-963a6dbad591-config\") pod \"dnsmasq-dns-57dc4c6697-rjc4t\" (UID: \"bf39297e-a183-46ed-9b65-963a6dbad591\") " pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.536999 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf39297e-a183-46ed-9b65-963a6dbad591-dns-svc\") pod \"dnsmasq-dns-57dc4c6697-rjc4t\" (UID: \"bf39297e-a183-46ed-9b65-963a6dbad591\") " pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.537027 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-825xf\" (UniqueName: \"kubernetes.io/projected/bf39297e-a183-46ed-9b65-963a6dbad591-kube-api-access-825xf\") pod \"dnsmasq-dns-57dc4c6697-rjc4t\" (UID: \"bf39297e-a183-46ed-9b65-963a6dbad591\") " pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.638187 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-825xf\" (UniqueName: \"kubernetes.io/projected/bf39297e-a183-46ed-9b65-963a6dbad591-kube-api-access-825xf\") pod \"dnsmasq-dns-57dc4c6697-rjc4t\" (UID: \"bf39297e-a183-46ed-9b65-963a6dbad591\") " pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.638309 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf39297e-a183-46ed-9b65-963a6dbad591-config\") pod \"dnsmasq-dns-57dc4c6697-rjc4t\" (UID: \"bf39297e-a183-46ed-9b65-963a6dbad591\") " pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.638335 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf39297e-a183-46ed-9b65-963a6dbad591-dns-svc\") pod \"dnsmasq-dns-57dc4c6697-rjc4t\" (UID: \"bf39297e-a183-46ed-9b65-963a6dbad591\") " pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.639289 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf39297e-a183-46ed-9b65-963a6dbad591-dns-svc\") pod \"dnsmasq-dns-57dc4c6697-rjc4t\" (UID: \"bf39297e-a183-46ed-9b65-963a6dbad591\") " pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.639646 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf39297e-a183-46ed-9b65-963a6dbad591-config\") pod \"dnsmasq-dns-57dc4c6697-rjc4t\" (UID: \"bf39297e-a183-46ed-9b65-963a6dbad591\") " pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.684655 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-825xf\" (UniqueName: \"kubernetes.io/projected/bf39297e-a183-46ed-9b65-963a6dbad591-kube-api-access-825xf\") pod \"dnsmasq-dns-57dc4c6697-rjc4t\" (UID: \"bf39297e-a183-46ed-9b65-963a6dbad591\") " pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.767222 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-v98cn"]
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.769355 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.807710 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-rlq5d"]
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.810250 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.817005 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-rlq5d"]
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.944210 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qb7tq\" (UniqueName: \"kubernetes.io/projected/3f202041-53e6-477f-89fb-943b93fae588-kube-api-access-qb7tq\") pod \"dnsmasq-dns-8446fd7c75-rlq5d\" (UID: \"3f202041-53e6-477f-89fb-943b93fae588\") " pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.944285 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f202041-53e6-477f-89fb-943b93fae588-config\") pod \"dnsmasq-dns-8446fd7c75-rlq5d\" (UID: \"3f202041-53e6-477f-89fb-943b93fae588\") " pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:08:56 crc kubenswrapper[4822]: I1201 07:08:56.944311 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f202041-53e6-477f-89fb-943b93fae588-dns-svc\") pod \"dnsmasq-dns-8446fd7c75-rlq5d\" (UID: \"3f202041-53e6-477f-89fb-943b93fae588\") " pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.045617 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f202041-53e6-477f-89fb-943b93fae588-config\") pod \"dnsmasq-dns-8446fd7c75-rlq5d\" (UID: \"3f202041-53e6-477f-89fb-943b93fae588\") " pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.045669 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f202041-53e6-477f-89fb-943b93fae588-dns-svc\") pod \"dnsmasq-dns-8446fd7c75-rlq5d\" (UID: \"3f202041-53e6-477f-89fb-943b93fae588\") " pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.045756 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qb7tq\" (UniqueName: \"kubernetes.io/projected/3f202041-53e6-477f-89fb-943b93fae588-kube-api-access-qb7tq\") pod \"dnsmasq-dns-8446fd7c75-rlq5d\" (UID: \"3f202041-53e6-477f-89fb-943b93fae588\") " pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.046496 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f202041-53e6-477f-89fb-943b93fae588-config\") pod \"dnsmasq-dns-8446fd7c75-rlq5d\" (UID: \"3f202041-53e6-477f-89fb-943b93fae588\") " pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.046641 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f202041-53e6-477f-89fb-943b93fae588-dns-svc\") pod \"dnsmasq-dns-8446fd7c75-rlq5d\" (UID: \"3f202041-53e6-477f-89fb-943b93fae588\") " pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.078490 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qb7tq\" (UniqueName: \"kubernetes.io/projected/3f202041-53e6-477f-89fb-943b93fae588-kube-api-access-qb7tq\") pod \"dnsmasq-dns-8446fd7c75-rlq5d\" (UID: \"3f202041-53e6-477f-89fb-943b93fae588\") " pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.154704 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.486998 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57dc4c6697-rjc4t"]
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.714995 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.717903 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.722605 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.722605 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.722796 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.724766 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.724863 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.725086 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.725100 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-6kz6n"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.725335 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.831428 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-rlq5d"]
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.884735 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.884805 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.884834 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.884861 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.884888 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.884924 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-server-conf\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.884953 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.884974 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4mn7\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-kube-api-access-h4mn7\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.884996 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.885021 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/da2985c5-716e-43ad-b892-ea29d88fa639-pod-info\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.885066 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/da2985c5-716e-43ad-b892-ea29d88fa639-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.934324 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.935726 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.942130 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.942135 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.942848 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.943120 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.943336 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-xvdfx"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.948498 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.966783 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.967584 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.987081 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.987171 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-server-conf\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.987217 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.987244 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4mn7\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-kube-api-access-h4mn7\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.987280 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.987303 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/da2985c5-716e-43ad-b892-ea29d88fa639-pod-info\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.987363 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/da2985c5-716e-43ad-b892-ea29d88fa639-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.987403 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.987431 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.987453 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.987476 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.989535 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.990847 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.991096 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.992859 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.993081 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.995933 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/da2985c5-716e-43ad-b892-ea29d88fa639-pod-info\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.997400 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.997912 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-server-conf\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:57 crc kubenswrapper[4822]: I1201 07:08:57.997944 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/da2985c5-716e-43ad-b892-ea29d88fa639-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.001010 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.018425 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4mn7\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-kube-api-access-h4mn7\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.062485 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " pod="openstack/rabbitmq-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.091374 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.091584 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.091682 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79brf\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-kube-api-access-79brf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.091706 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.091755 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a1229c08-35a5-4f16-8334-f32bb9b852b6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.091846 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.091914 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a1229c08-35a5-4f16-8334-f32bb9b852b6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.091939 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.091977 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.092063 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.092138 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.193490 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.193537 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.193582 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.193616 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79brf\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-kube-api-access-79brf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.193636 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.193660 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a1229c08-35a5-4f16-8334-f32bb9b852b6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.193700 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.193719 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a1229c08-35a5-4f16-8334-f32bb9b852b6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.193734 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.193748 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.193772 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.193917 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.198391 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.198540 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.198743 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.199223 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.199645 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.202744 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a1229c08-35a5-4f16-8334-f32bb9b852b6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.212248 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.212934 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a1229c08-35a5-4f16-8334-f32bb9b852b6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.219135 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79brf\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-kube-api-access-79brf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.224215 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.228066 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.269108 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.385450 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.545996 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t" event={"ID":"bf39297e-a183-46ed-9b65-963a6dbad591","Type":"ContainerStarted","Data":"c868e1fc072db2301b7277de1b8089a7c8836df4815e7cd7f5ad3fc25df6d7c4"}
Dec 01 07:08:58 crc kubenswrapper[4822]: I1201 07:08:58.551176 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d" event={"ID":"3f202041-53e6-477f-89fb-943b93fae588","Type":"ContainerStarted","Data":"e4b9d8aa40f112db3026f0546dae9d126885bbb51556dafae6a39e7a9246c7da"}
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.160201 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.161397 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.161475 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.167877 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.168755 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.170349 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.171346 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.173198 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-8tfbd"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.394116 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.394707 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ad18272e-45a9-40cd-8b46-2de8cb3a31be-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.394774 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvcxd\" (UniqueName: \"kubernetes.io/projected/ad18272e-45a9-40cd-8b46-2de8cb3a31be-kube-api-access-cvcxd\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.394804 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad18272e-45a9-40cd-8b46-2de8cb3a31be-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.394831 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad18272e-45a9-40cd-8b46-2de8cb3a31be-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.394870 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.394899 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-kolla-config\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.394934 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-config-data-default\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.436721 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 07:08:59 crc kubenswrapper[4822]: W1201 07:08:59.483209 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda2985c5_716e_43ad_b892_ea29d88fa639.slice/crio-3b70674c1661bf90811cb699f003b7aa68430cfcb82f91da21dfcdebe07a7455 WatchSource:0}: Error finding container 3b70674c1661bf90811cb699f003b7aa68430cfcb82f91da21dfcdebe07a7455: Status 404 returned error can't find the container with id 3b70674c1661bf90811cb699f003b7aa68430cfcb82f91da21dfcdebe07a7455
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.491356 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.496497 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad18272e-45a9-40cd-8b46-2de8cb3a31be-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.496540 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad18272e-45a9-40cd-8b46-2de8cb3a31be-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.496603 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.496806 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-kolla-config\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.496854 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-config-data-default\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.496896 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.496952 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ad18272e-45a9-40cd-8b46-2de8cb3a31be-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.497003 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvcxd\" (UniqueName: \"kubernetes.io/projected/ad18272e-45a9-40cd-8b46-2de8cb3a31be-kube-api-access-cvcxd\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.505281 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.505757 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-kolla-config\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.506002 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.506428 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ad18272e-45a9-40cd-8b46-2de8cb3a31be-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.506819 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-config-data-default\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.512593 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad18272e-45a9-40cd-8b46-2de8cb3a31be-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.521498 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad18272e-45a9-40cd-8b46-2de8cb3a31be-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.534947 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvcxd\" (UniqueName: \"kubernetes.io/projected/ad18272e-45a9-40cd-8b46-2de8cb3a31be-kube-api-access-cvcxd\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.541385 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " pod="openstack/openstack-galera-0"
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.577701 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"da2985c5-716e-43ad-b892-ea29d88fa639","Type":"ContainerStarted","Data":"3b70674c1661bf90811cb699f003b7aa68430cfcb82f91da21dfcdebe07a7455"}
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.579716 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a1229c08-35a5-4f16-8334-f32bb9b852b6","Type":"ContainerStarted","Data":"052345f8cc8e328db9fdb06a5022ab676f27f53deca15d085b31e977883310d7"}
Dec 01 07:08:59 crc kubenswrapper[4822]: I1201 07:08:59.806590 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.493108 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.501028 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.507369 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-ljtfx"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.507734 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.508192 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.509323 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.510022 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.549114 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.549724 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.549754 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.549780 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.552315 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.552400 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.552519 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.552614 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tmwf\" (UniqueName: \"kubernetes.io/projected/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-kube-api-access-7tmwf\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.655972 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.656021 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.656050 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.656084 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.656128 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tmwf\" (UniqueName: \"kubernetes.io/projected/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-kube-api-access-7tmwf\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.656170 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.656222 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.656244 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.656783 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.661234 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.665826 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.665893 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.669471 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.672957 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.684496 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.693634 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tmwf\" (UniqueName: \"kubernetes.io/projected/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-kube-api-access-7tmwf\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.755811 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.830139 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.831233 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.844126 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.844429 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.844537 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-8q8gd"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.850785 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.870936 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Dec 01 07:09:00 crc kubenswrapper[4822]: W1201 07:09:00.874041 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad18272e_45a9_40cd_8b46_2de8cb3a31be.slice/crio-6cb795531bfa3ea33e39225f945520d108064825ef5e9a8424025b33ce664262 WatchSource:0}: Error finding container 6cb795531bfa3ea33e39225f945520d108064825ef5e9a8424025b33ce664262: Status 404 returned error can't find the container with id 6cb795531bfa3ea33e39225f945520d108064825ef5e9a8424025b33ce664262
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.894785 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.962121 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/baab33a8-22b8-4097-8c91-73d5f005fdf7-config-data\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.962488 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/baab33a8-22b8-4097-8c91-73d5f005fdf7-memcached-tls-certs\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.962541 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/baab33a8-22b8-4097-8c91-73d5f005fdf7-kolla-config\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.962598 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baab33a8-22b8-4097-8c91-73d5f005fdf7-combined-ca-bundle\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:00 crc kubenswrapper[4822]: I1201 07:09:00.962620 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x42f\" (UniqueName: \"kubernetes.io/projected/baab33a8-22b8-4097-8c91-73d5f005fdf7-kube-api-access-7x42f\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.064489 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baab33a8-22b8-4097-8c91-73d5f005fdf7-combined-ca-bundle\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.064533 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x42f\" (UniqueName: \"kubernetes.io/projected/baab33a8-22b8-4097-8c91-73d5f005fdf7-kube-api-access-7x42f\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.064586 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/baab33a8-22b8-4097-8c91-73d5f005fdf7-config-data\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.064636 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/baab33a8-22b8-4097-8c91-73d5f005fdf7-memcached-tls-certs\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.064682 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/baab33a8-22b8-4097-8c91-73d5f005fdf7-kolla-config\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.067385 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/baab33a8-22b8-4097-8c91-73d5f005fdf7-kolla-config\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.067623 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/baab33a8-22b8-4097-8c91-73d5f005fdf7-config-data\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.072188 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/baab33a8-22b8-4097-8c91-73d5f005fdf7-memcached-tls-certs\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.100349 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baab33a8-22b8-4097-8c91-73d5f005fdf7-combined-ca-bundle\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.109191 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7x42f\" (UniqueName: \"kubernetes.io/projected/baab33a8-22b8-4097-8c91-73d5f005fdf7-kube-api-access-7x42f\") pod \"memcached-0\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " pod="openstack/memcached-0"
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.253765 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.640284 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 01 07:09:01 crc kubenswrapper[4822]: I1201 07:09:01.736003 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ad18272e-45a9-40cd-8b46-2de8cb3a31be","Type":"ContainerStarted","Data":"6cb795531bfa3ea33e39225f945520d108064825ef5e9a8424025b33ce664262"}
Dec 01 07:09:02 crc kubenswrapper[4822]: I1201 07:09:02.031377 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Dec 01 07:09:02 crc kubenswrapper[4822]: W1201 07:09:02.036950 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbaab33a8_22b8_4097_8c91_73d5f005fdf7.slice/crio-e410087fab5cfe7c763eb2aa572de8903db0720ce743da2ef666d7bdecbe0b67 WatchSource:0}: Error finding container e410087fab5cfe7c763eb2aa572de8903db0720ce743da2ef666d7bdecbe0b67: Status 404 returned error can't find the container with id e410087fab5cfe7c763eb2aa572de8903db0720ce743da2ef666d7bdecbe0b67
Dec 01 07:09:02 crc kubenswrapper[4822]: I1201 07:09:02.533518 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 01 07:09:02 crc kubenswrapper[4822]: I1201 07:09:02.535035 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 01 07:09:02 crc kubenswrapper[4822]: I1201 07:09:02.537899 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-sz44w"
Dec 01 07:09:02 crc kubenswrapper[4822]: I1201 07:09:02.551964 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 01 07:09:02 crc kubenswrapper[4822]: I1201 07:09:02.595844 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st4b9\" (UniqueName: \"kubernetes.io/projected/47ea8f0f-c6c9-412a-b158-878fe3107ed5-kube-api-access-st4b9\") pod \"kube-state-metrics-0\" (UID: \"47ea8f0f-c6c9-412a-b158-878fe3107ed5\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:09:02 crc kubenswrapper[4822]: I1201 07:09:02.697126 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st4b9\" (UniqueName: \"kubernetes.io/projected/47ea8f0f-c6c9-412a-b158-878fe3107ed5-kube-api-access-st4b9\") pod \"kube-state-metrics-0\" (UID: \"47ea8f0f-c6c9-412a-b158-878fe3107ed5\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:09:02 crc kubenswrapper[4822]: I1201 07:09:02.754241 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st4b9\" (UniqueName: \"kubernetes.io/projected/47ea8f0f-c6c9-412a-b158-878fe3107ed5-kube-api-access-st4b9\") pod \"kube-state-metrics-0\" (UID: \"47ea8f0f-c6c9-412a-b158-878fe3107ed5\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:09:02 crc kubenswrapper[4822]: I1201 07:09:02.805714 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"baab33a8-22b8-4097-8c91-73d5f005fdf7","Type":"ContainerStarted","Data":"e410087fab5cfe7c763eb2aa572de8903db0720ce743da2ef666d7bdecbe0b67"}
Dec 01 07:09:02 crc kubenswrapper[4822]: I1201 07:09:02.807483 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0"
event={"ID":"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a","Type":"ContainerStarted","Data":"db0ec99f3583f720a0f43a152aaed9bf3fa0b08d2ff592581a36a0a984a1688f"} Dec 01 07:09:02 crc kubenswrapper[4822]: I1201 07:09:02.942844 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 07:09:03 crc kubenswrapper[4822]: W1201 07:09:03.730511 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47ea8f0f_c6c9_412a_b158_878fe3107ed5.slice/crio-3422bd0ef123c42d4c51d085787aad448cc12613ee444aa5a0f2186e69f21990 WatchSource:0}: Error finding container 3422bd0ef123c42d4c51d085787aad448cc12613ee444aa5a0f2186e69f21990: Status 404 returned error can't find the container with id 3422bd0ef123c42d4c51d085787aad448cc12613ee444aa5a0f2186e69f21990 Dec 01 07:09:03 crc kubenswrapper[4822]: I1201 07:09:03.743141 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 07:09:03 crc kubenswrapper[4822]: I1201 07:09:03.818266 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"47ea8f0f-c6c9-412a-b158-878fe3107ed5","Type":"ContainerStarted","Data":"3422bd0ef123c42d4c51d085787aad448cc12613ee444aa5a0f2186e69f21990"} Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.042252 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-q78vl"] Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.044372 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.062465 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.062688 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.063030 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-j52wh" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.077565 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-q78vl"] Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.092836 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-5prf9"] Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.094914 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.124151 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5prf9"] Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.151752 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-log-ovn\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.151843 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/add830fb-5a2f-4cc2-8998-32ca893263db-ovn-controller-tls-certs\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.151900 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6wch\" (UniqueName: \"kubernetes.io/projected/27be7b1c-254b-4dd5-8889-1373d3281e64-kube-api-access-v6wch\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.151921 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-run\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.151961 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-run-ovn\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.151994 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/add830fb-5a2f-4cc2-8998-32ca893263db-combined-ca-bundle\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.152019 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/add830fb-5a2f-4cc2-8998-32ca893263db-scripts\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.152041 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-run\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.152073 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fvlg\" 
(UniqueName: \"kubernetes.io/projected/add830fb-5a2f-4cc2-8998-32ca893263db-kube-api-access-4fvlg\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.152091 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27be7b1c-254b-4dd5-8889-1373d3281e64-scripts\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.152114 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-lib\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.152142 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-log\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.152160 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-etc-ovs\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.255296 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-run-ovn\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.255381 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/add830fb-5a2f-4cc2-8998-32ca893263db-combined-ca-bundle\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.255436 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/add830fb-5a2f-4cc2-8998-32ca893263db-scripts\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.255463 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-run\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.255514 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fvlg\" (UniqueName: \"kubernetes.io/projected/add830fb-5a2f-4cc2-8998-32ca893263db-kube-api-access-4fvlg\") pod \"ovn-controller-q78vl\" (UID: 
\"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.255536 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27be7b1c-254b-4dd5-8889-1373d3281e64-scripts\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.255872 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-lib\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.255894 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-log\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.255933 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-etc-ovs\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.255951 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-log-ovn\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.255977 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/add830fb-5a2f-4cc2-8998-32ca893263db-ovn-controller-tls-certs\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.256040 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6wch\" (UniqueName: \"kubernetes.io/projected/27be7b1c-254b-4dd5-8889-1373d3281e64-kube-api-access-v6wch\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.256057 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-run\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.256737 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-run\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.256825 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-run\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-run\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.258955 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/add830fb-5a2f-4cc2-8998-32ca893263db-scripts\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.259130 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-run-ovn\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.259842 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27be7b1c-254b-4dd5-8889-1373d3281e64-scripts\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.259977 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-lib\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.260087 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-log\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.260190 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-etc-ovs\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.260257 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-log-ovn\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.267026 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/add830fb-5a2f-4cc2-8998-32ca893263db-combined-ca-bundle\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.289373 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/add830fb-5a2f-4cc2-8998-32ca893263db-ovn-controller-tls-certs\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.292651 4822 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6wch\" (UniqueName: \"kubernetes.io/projected/27be7b1c-254b-4dd5-8889-1373d3281e64-kube-api-access-v6wch\") pod \"ovn-controller-ovs-5prf9\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.293813 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fvlg\" (UniqueName: \"kubernetes.io/projected/add830fb-5a2f-4cc2-8998-32ca893263db-kube-api-access-4fvlg\") pod \"ovn-controller-q78vl\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.409847 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-q78vl" Dec 01 07:09:05 crc kubenswrapper[4822]: I1201 07:09:05.424051 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.103477 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.107828 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.110086 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.110228 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-6zp88" Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.110338 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.110731 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.112054 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.134754 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.173405 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13737e91-3ce4-4bcc-a605-d1591596c446-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0" Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.173446 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0" Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.173470 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0" Dec 01 07:09:08 crc 
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.173491 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.173558 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.173592 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13737e91-3ce4-4bcc-a605-d1591596c446-config\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.173610 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt5v5\" (UniqueName: \"kubernetes.io/projected/13737e91-3ce4-4bcc-a605-d1591596c446-kube-api-access-wt5v5\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.173636 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/13737e91-3ce4-4bcc-a605-d1591596c446-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.274724 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13737e91-3ce4-4bcc-a605-d1591596c446-config\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.274787 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt5v5\" (UniqueName: \"kubernetes.io/projected/13737e91-3ce4-4bcc-a605-d1591596c446-kube-api-access-wt5v5\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.274831 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/13737e91-3ce4-4bcc-a605-d1591596c446-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.274877 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13737e91-3ce4-4bcc-a605-d1591596c446-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.274905 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.274941 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.274965 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.275024 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.275992 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13737e91-3ce4-4bcc-a605-d1591596c446-config\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.276716 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/13737e91-3ce4-4bcc-a605-d1591596c446-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.276977 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.277229 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13737e91-3ce4-4bcc-a605-d1591596c446-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.372799 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.372844 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.372901 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt5v5\" (UniqueName: \"kubernetes.io/projected/13737e91-3ce4-4bcc-a605-d1591596c446-kube-api-access-wt5v5\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.375096 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.384566 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:08 crc kubenswrapper[4822]: I1201 07:09:08.439570 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 01 07:09:09 crc kubenswrapper[4822]: I1201 07:09:09.990858 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 01 07:09:09 crc kubenswrapper[4822]: I1201 07:09:09.992897 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:09 crc kubenswrapper[4822]: I1201 07:09:09.999943 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.000154 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-g9xxb"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.005502 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.005778 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.010789 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/735cd01e-931e-44b0-968d-ebe114278896-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.010829 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.010856 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qbwf\" (UniqueName: \"kubernetes.io/projected/735cd01e-931e-44b0-968d-ebe114278896-kube-api-access-5qbwf\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.011037 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/735cd01e-931e-44b0-968d-ebe114278896-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.011090 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.011111 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/735cd01e-931e-44b0-968d-ebe114278896-config\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.011198 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.011223 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.024276 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.113715 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.113747 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/735cd01e-931e-44b0-968d-ebe114278896-config\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.113785 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.113804 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.113992 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.115240 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/735cd01e-931e-44b0-968d-ebe114278896-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.115276 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.115300 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qbwf\" (UniqueName: \"kubernetes.io/projected/735cd01e-931e-44b0-968d-ebe114278896-kube-api-access-5qbwf\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.115364 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/735cd01e-931e-44b0-968d-ebe114278896-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.115544 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/735cd01e-931e-44b0-968d-ebe114278896-config\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.116465 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/735cd01e-931e-44b0-968d-ebe114278896-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.117047 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/735cd01e-931e-44b0-968d-ebe114278896-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.129450 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.129785 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.130106 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0"
(UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0" Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.132026 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qbwf\" (UniqueName: \"kubernetes.io/projected/735cd01e-931e-44b0-968d-ebe114278896-kube-api-access-5qbwf\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0" Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.135316 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " pod="openstack/ovsdbserver-sb-0" Dec 01 07:09:10 crc kubenswrapper[4822]: I1201 07:09:10.323100 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 01 07:09:12 crc kubenswrapper[4822]: I1201 07:09:12.542810 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:09:12 crc kubenswrapper[4822]: I1201 07:09:12.543266 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:09:20 crc kubenswrapper[4822]: E1201 07:09:20.630620 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52" Dec 01 07:09:20 crc kubenswrapper[4822]: E1201 07:09:20.631832 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7tmwf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(50ff5a68-493b-4e62-9e88-a8f1d0b4d78a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:09:20 crc kubenswrapper[4822]: E1201 07:09:20.633194 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" Dec 01 07:09:21 crc kubenswrapper[4822]: E1201 07:09:21.022422 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" Dec 01 07:09:21 crc kubenswrapper[4822]: E1201 07:09:21.625736 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:c64e18fe0ecb6900e763e6cf6be0ca8f71b5c8af9e078a543238a505cf88ae46" Dec 01 07:09:21 crc kubenswrapper[4822]: E1201 07:09:21.625938 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:c64e18fe0ecb6900e763e6cf6be0ca8f71b5c8af9e078a543238a505cf88ae46,Command:[sh -c cp 
/tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-79brf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(a1229c08-35a5-4f16-8334-f32bb9b852b6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:09:21 crc kubenswrapper[4822]: E1201 07:09:21.627182 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="a1229c08-35a5-4f16-8334-f32bb9b852b6" Dec 01 07:09:22 crc kubenswrapper[4822]: E1201 07:09:22.028299 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:c64e18fe0ecb6900e763e6cf6be0ca8f71b5c8af9e078a543238a505cf88ae46\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="a1229c08-35a5-4f16-8334-f32bb9b852b6" Dec 01 07:09:24 crc kubenswrapper[4822]: E1201 07:09:24.321467 4822 log.go:32] 
"PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52" Dec 01 07:09:24 crc kubenswrapper[4822]: E1201 07:09:24.321926 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cvcxd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(ad18272e-45a9-40cd-8b46-2de8cb3a31be): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:09:24 crc kubenswrapper[4822]: E1201 07:09:24.325954 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="ad18272e-45a9-40cd-8b46-2de8cb3a31be" Dec 01 07:09:24 crc kubenswrapper[4822]: E1201 07:09:24.326010 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:c64e18fe0ecb6900e763e6cf6be0ca8f71b5c8af9e078a543238a505cf88ae46" Dec 01 07:09:24 crc kubenswrapper[4822]: E1201 07:09:24.326299 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:c64e18fe0ecb6900e763e6cf6be0ca8f71b5c8af9e078a543238a505cf88ae46,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h4mn7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(da2985c5-716e-43ad-b892-ea29d88fa639): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:09:24 crc kubenswrapper[4822]: E1201 07:09:24.327712 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="da2985c5-716e-43ad-b892-ea29d88fa639" Dec 01 07:09:25 crc kubenswrapper[4822]: E1201 07:09:25.052677 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52\\\"\"" 
pod="openstack/openstack-galera-0" podUID="ad18272e-45a9-40cd-8b46-2de8cb3a31be" Dec 01 07:09:25 crc kubenswrapper[4822]: E1201 07:09:25.055831 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:c64e18fe0ecb6900e763e6cf6be0ca8f71b5c8af9e078a543238a505cf88ae46\\\"\"" pod="openstack/rabbitmq-server-0" podUID="da2985c5-716e-43ad-b892-ea29d88fa639" Dec 01 07:09:25 crc kubenswrapper[4822]: E1201 07:09:25.115653 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached@sha256:0e00f2303db35259ffcd3d034f38ab9eb4cb089e268305a4165b5f86a18fce6c" Dec 01 07:09:25 crc kubenswrapper[4822]: E1201 07:09:25.115883 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached@sha256:0e00f2303db35259ffcd3d034f38ab9eb4cb089e268305a4165b5f86a18fce6c,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n65dh5d5h598h66h5fbh5cbh67dh5b5h65dh5c4h55fh64fh594h64dh55ch5dbhf5h7dh68dh674h547h59bh569h5bbhbh676h65fh9chbdh64bh5d6h5b5q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7x42f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(baab33a8-22b8-4097-8c91-73d5f005fdf7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:09:25 crc kubenswrapper[4822]: E1201 07:09:25.117126 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="baab33a8-22b8-4097-8c91-73d5f005fdf7" Dec 01 07:09:25 crc kubenswrapper[4822]: I1201 07:09:25.857346 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-q78vl"] Dec 01 07:09:26 crc kubenswrapper[4822]: E1201 07:09:26.058104 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached@sha256:0e00f2303db35259ffcd3d034f38ab9eb4cb089e268305a4165b5f86a18fce6c\\\"\"" pod="openstack/memcached-0" podUID="baab33a8-22b8-4097-8c91-73d5f005fdf7" Dec 01 07:09:30 crc kubenswrapper[4822]: W1201 07:09:30.543805 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podadd830fb_5a2f_4cc2_8998_32ca893263db.slice/crio-ccc98990c366b3efc3c1761b077d7d9ba2c9ae7570eb83510ac1bfa4f323df8f WatchSource:0}: Error finding container ccc98990c366b3efc3c1761b077d7d9ba2c9ae7570eb83510ac1bfa4f323df8f: Status 404 returned error can't find the container with id ccc98990c366b3efc3c1761b077d7d9ba2c9ae7570eb83510ac1bfa4f323df8f Dec 01 07:09:31 crc kubenswrapper[4822]: I1201 07:09:31.093068 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-q78vl" event={"ID":"add830fb-5a2f-4cc2-8998-32ca893263db","Type":"ContainerStarted","Data":"ccc98990c366b3efc3c1761b077d7d9ba2c9ae7570eb83510ac1bfa4f323df8f"} Dec 01 07:09:31 crc kubenswrapper[4822]: E1201 07:09:31.776394 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627" Dec 01 07:09:31 crc kubenswrapper[4822]: E1201 07:09:31.776584 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts 
--domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qb7tq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-8446fd7c75-rlq5d_openstack(3f202041-53e6-477f-89fb-943b93fae588): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:09:31 crc kubenswrapper[4822]: E1201 07:09:31.777800 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d" podUID="3f202041-53e6-477f-89fb-943b93fae588" Dec 01 07:09:31 crc kubenswrapper[4822]: E1201 07:09:31.786995 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627" Dec 01 07:09:31 crc kubenswrapper[4822]: E1201 07:09:31.787138 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bdgk4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-766fdc659c-v98cn_openstack(40f5477c-c24a-45f7-ad84-48f06a34304c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:09:31 crc kubenswrapper[4822]: E1201 07:09:31.790802 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-766fdc659c-v98cn" podUID="40f5477c-c24a-45f7-ad84-48f06a34304c" Dec 01 07:09:31 crc kubenswrapper[4822]: E1201 07:09:31.813136 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627" Dec 01 07:09:31 crc kubenswrapper[4822]: E1201 07:09:31.813333 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-825xf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57dc4c6697-rjc4t_openstack(bf39297e-a183-46ed-9b65-963a6dbad591): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:09:31 crc kubenswrapper[4822]: E1201 07:09:31.814960 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t" podUID="bf39297e-a183-46ed-9b65-963a6dbad591" Dec 01 07:09:31 crc kubenswrapper[4822]: E1201 07:09:31.857613 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627" Dec 01 07:09:31 crc kubenswrapper[4822]: E1201 07:09:31.857806 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cqr2c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-557f57d995-55vxk_openstack(2f03b65d-f2d8-4ac3-ac9b-932418566153): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:09:31 crc kubenswrapper[4822]: E1201 07:09:31.859451 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-557f57d995-55vxk" podUID="2f03b65d-f2d8-4ac3-ac9b-932418566153" Dec 01 07:09:32 crc kubenswrapper[4822]: E1201 07:09:32.113505 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627\\\"\"" pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d" podUID="3f202041-53e6-477f-89fb-943b93fae588" Dec 01 07:09:32 crc kubenswrapper[4822]: E1201 07:09:32.113904 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627\\\"\"" pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t" podUID="bf39297e-a183-46ed-9b65-963a6dbad591" Dec 01 07:09:32 crc kubenswrapper[4822]: I1201 07:09:32.498269 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 07:09:32 crc kubenswrapper[4822]: W1201 07:09:32.504683 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13737e91_3ce4_4bcc_a605_d1591596c446.slice/crio-536960d21bed17fa788b37641e7e72c8b8bf72204717961977574d0a30be0e39 WatchSource:0}: Error finding container 536960d21bed17fa788b37641e7e72c8b8bf72204717961977574d0a30be0e39: 
Status 404 returned error can't find the container with id 536960d21bed17fa788b37641e7e72c8b8bf72204717961977574d0a30be0e39 Dec 01 07:09:32 crc kubenswrapper[4822]: I1201 07:09:32.603716 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 07:09:32 crc kubenswrapper[4822]: I1201 07:09:32.717359 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5prf9"] Dec 01 07:09:32 crc kubenswrapper[4822]: I1201 07:09:32.746276 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:09:32 crc kubenswrapper[4822]: I1201 07:09:32.913498 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdgk4\" (UniqueName: \"kubernetes.io/projected/40f5477c-c24a-45f7-ad84-48f06a34304c-kube-api-access-bdgk4\") pod \"40f5477c-c24a-45f7-ad84-48f06a34304c\" (UID: \"40f5477c-c24a-45f7-ad84-48f06a34304c\") " Dec 01 07:09:32 crc kubenswrapper[4822]: I1201 07:09:32.913566 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40f5477c-c24a-45f7-ad84-48f06a34304c-dns-svc\") pod \"40f5477c-c24a-45f7-ad84-48f06a34304c\" (UID: \"40f5477c-c24a-45f7-ad84-48f06a34304c\") " Dec 01 07:09:32 crc kubenswrapper[4822]: I1201 07:09:32.913730 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40f5477c-c24a-45f7-ad84-48f06a34304c-config\") pod \"40f5477c-c24a-45f7-ad84-48f06a34304c\" (UID: \"40f5477c-c24a-45f7-ad84-48f06a34304c\") " Dec 01 07:09:32 crc kubenswrapper[4822]: I1201 07:09:32.914129 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40f5477c-c24a-45f7-ad84-48f06a34304c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "40f5477c-c24a-45f7-ad84-48f06a34304c" (UID: "40f5477c-c24a-45f7-ad84-48f06a34304c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:09:32 crc kubenswrapper[4822]: I1201 07:09:32.914161 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40f5477c-c24a-45f7-ad84-48f06a34304c-config" (OuterVolumeSpecName: "config") pod "40f5477c-c24a-45f7-ad84-48f06a34304c" (UID: "40f5477c-c24a-45f7-ad84-48f06a34304c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:09:32 crc kubenswrapper[4822]: I1201 07:09:32.918152 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40f5477c-c24a-45f7-ad84-48f06a34304c-kube-api-access-bdgk4" (OuterVolumeSpecName: "kube-api-access-bdgk4") pod "40f5477c-c24a-45f7-ad84-48f06a34304c" (UID: "40f5477c-c24a-45f7-ad84-48f06a34304c"). InnerVolumeSpecName "kube-api-access-bdgk4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.015979 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40f5477c-c24a-45f7-ad84-48f06a34304c-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.016031 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdgk4\" (UniqueName: \"kubernetes.io/projected/40f5477c-c24a-45f7-ad84-48f06a34304c-kube-api-access-bdgk4\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.016048 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40f5477c-c24a-45f7-ad84-48f06a34304c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:33 crc kubenswrapper[4822]: E1201 07:09:33.109840 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb" Dec 01 07:09:33 crc kubenswrapper[4822]: E1201 07:09:33.109875 4822 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb" Dec 01 07:09:33 crc kubenswrapper[4822]: E1201 07:09:33.109969 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-st4b9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(47ea8f0f-c6c9-412a-b158-878fe3107ed5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 07:09:33 crc kubenswrapper[4822]: E1201 07:09:33.111060 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="47ea8f0f-c6c9-412a-b158-878fe3107ed5" Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.121367 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5prf9" event={"ID":"27be7b1c-254b-4dd5-8889-1373d3281e64","Type":"ContainerStarted","Data":"9702b1e37b18f11a39a73a0540181eade0bf1cb0369eec7100718fedeeac86d9"} Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.122528 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"13737e91-3ce4-4bcc-a605-d1591596c446","Type":"ContainerStarted","Data":"536960d21bed17fa788b37641e7e72c8b8bf72204717961977574d0a30be0e39"} Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.123476 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557f57d995-55vxk" event={"ID":"2f03b65d-f2d8-4ac3-ac9b-932418566153","Type":"ContainerDied","Data":"d87bf5248996d4784c34707f6a05de7921a2c07737426d6477e7e380b56820a1"} Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.123512 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d87bf5248996d4784c34707f6a05de7921a2c07737426d6477e7e380b56820a1" Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.124300 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"735cd01e-931e-44b0-968d-ebe114278896","Type":"ContainerStarted","Data":"ad310feb175d6d9c6ee5fbc0fc3d797cb49078408c0079fa88833b54376865d1"} Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.124961 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-766fdc659c-v98cn" event={"ID":"40f5477c-c24a-45f7-ad84-48f06a34304c","Type":"ContainerDied","Data":"b5051da51c91c2e484aebed92226a4107707da61977fbc45cd9c1ecb5e832c70"} Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.125020 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-v98cn" Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.268873 4822 util.go:48] "No ready sandbox for pod can be found. 
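The repeated "ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" records above indicate the image copy was interrupted mid-transfer (a Go context cancellation) rather than rejected by the registry; kubelet then moves the container into ImagePullBackOff. A minimal way to re-check such a pull outside kubelet, reusing the pod and digest named in these records (assumes kubectl access to the cluster and crictl configured against the node's CRI-O socket):

    # The Events section shows the ErrImagePull / ImagePullBackOff history for the pod:
    kubectl -n openstack describe pod kube-state-metrics-0

    # Retry the exact digest directly against the container runtime (run on the node):
    crictl pull registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb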
Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-55vxk" Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.324111 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-v98cn"] Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.329980 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-v98cn"] Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.421699 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqr2c\" (UniqueName: \"kubernetes.io/projected/2f03b65d-f2d8-4ac3-ac9b-932418566153-kube-api-access-cqr2c\") pod \"2f03b65d-f2d8-4ac3-ac9b-932418566153\" (UID: \"2f03b65d-f2d8-4ac3-ac9b-932418566153\") " Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.421735 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f03b65d-f2d8-4ac3-ac9b-932418566153-config\") pod \"2f03b65d-f2d8-4ac3-ac9b-932418566153\" (UID: \"2f03b65d-f2d8-4ac3-ac9b-932418566153\") " Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.422504 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f03b65d-f2d8-4ac3-ac9b-932418566153-config" (OuterVolumeSpecName: "config") pod "2f03b65d-f2d8-4ac3-ac9b-932418566153" (UID: "2f03b65d-f2d8-4ac3-ac9b-932418566153"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.426785 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f03b65d-f2d8-4ac3-ac9b-932418566153-kube-api-access-cqr2c" (OuterVolumeSpecName: "kube-api-access-cqr2c") pod "2f03b65d-f2d8-4ac3-ac9b-932418566153" (UID: "2f03b65d-f2d8-4ac3-ac9b-932418566153"). InnerVolumeSpecName "kube-api-access-cqr2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.523839 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqr2c\" (UniqueName: \"kubernetes.io/projected/2f03b65d-f2d8-4ac3-ac9b-932418566153-kube-api-access-cqr2c\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:33 crc kubenswrapper[4822]: I1201 07:09:33.523880 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f03b65d-f2d8-4ac3-ac9b-932418566153-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:34 crc kubenswrapper[4822]: I1201 07:09:34.139443 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a","Type":"ContainerStarted","Data":"7141addb9b9ef0268fb154fd7e37bf5e2bf29947673a762e71450248e473f5d6"} Dec 01 07:09:34 crc kubenswrapper[4822]: I1201 07:09:34.139477 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-55vxk" Dec 01 07:09:34 crc kubenswrapper[4822]: E1201 07:09:34.158790 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb\\\"\"" pod="openstack/kube-state-metrics-0" podUID="47ea8f0f-c6c9-412a-b158-878fe3107ed5" Dec 01 07:09:34 crc kubenswrapper[4822]: I1201 07:09:34.236729 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-55vxk"] Dec 01 07:09:34 crc kubenswrapper[4822]: I1201 07:09:34.248111 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-55vxk"] Dec 01 07:09:34 crc kubenswrapper[4822]: I1201 07:09:34.959516 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f03b65d-f2d8-4ac3-ac9b-932418566153" path="/var/lib/kubelet/pods/2f03b65d-f2d8-4ac3-ac9b-932418566153/volumes" Dec 01 07:09:34 crc kubenswrapper[4822]: I1201 07:09:34.959887 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40f5477c-c24a-45f7-ad84-48f06a34304c" path="/var/lib/kubelet/pods/40f5477c-c24a-45f7-ad84-48f06a34304c/volumes" Dec 01 07:09:36 crc kubenswrapper[4822]: I1201 07:09:36.156875 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"735cd01e-931e-44b0-968d-ebe114278896","Type":"ContainerStarted","Data":"d492a8fc847ccb6927b247e9b2bcc137ee2203221c147de374c232b92714bb3a"} Dec 01 07:09:36 crc kubenswrapper[4822]: I1201 07:09:36.158939 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-q78vl" event={"ID":"add830fb-5a2f-4cc2-8998-32ca893263db","Type":"ContainerStarted","Data":"df2d6aa20ff2fa8c870d3a35b396ea04c4a1dde752f113ceeea95e9f64d72321"} Dec 01 07:09:36 crc kubenswrapper[4822]: I1201 07:09:36.159039 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-q78vl" Dec 01 07:09:36 crc kubenswrapper[4822]: I1201 07:09:36.161140 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5prf9" event={"ID":"27be7b1c-254b-4dd5-8889-1373d3281e64","Type":"ContainerStarted","Data":"1affd40541c7e8157e218b9304310f8b68ceaf6708ad6cf840d000c28ae5a8b5"} Dec 01 07:09:36 crc kubenswrapper[4822]: I1201 07:09:36.164328 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"13737e91-3ce4-4bcc-a605-d1591596c446","Type":"ContainerStarted","Data":"47987ba4418d28d6c596deb0f84178af4b52de7645b18ebeecc2505db58b4d61"} Dec 01 07:09:36 crc kubenswrapper[4822]: I1201 07:09:36.182347 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-q78vl" podStartSLOduration=27.069293748 podStartE2EDuration="32.182326749s" podCreationTimestamp="2025-12-01 07:09:04 +0000 UTC" firstStartedPulling="2025-12-01 07:09:30.546336489 +0000 UTC m=+1125.867144175" lastFinishedPulling="2025-12-01 07:09:35.65936946 +0000 UTC m=+1130.980177176" observedRunningTime="2025-12-01 07:09:36.176609229 +0000 UTC m=+1131.497416915" watchObservedRunningTime="2025-12-01 07:09:36.182326749 +0000 UTC m=+1131.503134445" Dec 01 07:09:37 crc kubenswrapper[4822]: I1201 07:09:37.175219 4822 generic.go:334] "Generic (PLEG): container finished" podID="27be7b1c-254b-4dd5-8889-1373d3281e64" 
containerID="1affd40541c7e8157e218b9304310f8b68ceaf6708ad6cf840d000c28ae5a8b5" exitCode=0 Dec 01 07:09:37 crc kubenswrapper[4822]: I1201 07:09:37.175443 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5prf9" event={"ID":"27be7b1c-254b-4dd5-8889-1373d3281e64","Type":"ContainerDied","Data":"1affd40541c7e8157e218b9304310f8b68ceaf6708ad6cf840d000c28ae5a8b5"} Dec 01 07:09:37 crc kubenswrapper[4822]: I1201 07:09:37.178582 4822 generic.go:334] "Generic (PLEG): container finished" podID="50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" containerID="7141addb9b9ef0268fb154fd7e37bf5e2bf29947673a762e71450248e473f5d6" exitCode=0 Dec 01 07:09:37 crc kubenswrapper[4822]: I1201 07:09:37.178838 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a","Type":"ContainerDied","Data":"7141addb9b9ef0268fb154fd7e37bf5e2bf29947673a762e71450248e473f5d6"} Dec 01 07:09:38 crc kubenswrapper[4822]: I1201 07:09:38.188425 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a","Type":"ContainerStarted","Data":"d67bae7c70f17ec833b424c5adfa870275d1a2d4526a3192bf63ea5af23f096a"} Dec 01 07:09:38 crc kubenswrapper[4822]: I1201 07:09:38.190082 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"da2985c5-716e-43ad-b892-ea29d88fa639","Type":"ContainerStarted","Data":"d49bbf181bc9328d72a73c564789a29b58507dddaa3e5e79ce55c2b497a8f7ef"} Dec 01 07:09:38 crc kubenswrapper[4822]: I1201 07:09:38.193884 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5prf9" event={"ID":"27be7b1c-254b-4dd5-8889-1373d3281e64","Type":"ContainerStarted","Data":"2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05"} Dec 01 07:09:38 crc kubenswrapper[4822]: I1201 07:09:38.199213 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a1229c08-35a5-4f16-8334-f32bb9b852b6","Type":"ContainerStarted","Data":"3a3f116481c73dfe5a9a8a3545e38ee6bdbcf5cf5bee9252fffc9f947030fbe5"} Dec 01 07:09:38 crc kubenswrapper[4822]: I1201 07:09:38.254394 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.84236465 podStartE2EDuration="39.254369052s" podCreationTimestamp="2025-12-01 07:08:59 +0000 UTC" firstStartedPulling="2025-12-01 07:09:01.737478698 +0000 UTC m=+1097.058286384" lastFinishedPulling="2025-12-01 07:09:33.1494831 +0000 UTC m=+1128.470290786" observedRunningTime="2025-12-01 07:09:38.21442561 +0000 UTC m=+1133.535233366" watchObservedRunningTime="2025-12-01 07:09:38.254369052 +0000 UTC m=+1133.575176768" Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.285831 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5prf9" event={"ID":"27be7b1c-254b-4dd5-8889-1373d3281e64","Type":"ContainerStarted","Data":"c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0"} Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.287334 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.287970 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" 
event={"ID":"ad18272e-45a9-40cd-8b46-2de8cb3a31be","Type":"ContainerStarted","Data":"47b85be0256b3129ddbeda68dea127dd2fa6e5d1670aef3677449c16013f88ce"} Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.288066 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.289519 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"13737e91-3ce4-4bcc-a605-d1591596c446","Type":"ContainerStarted","Data":"399ace0799f2c11abaf11cd88fb9941adf1ed68dea1ffe69cd7f58d95ad171e4"} Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.292050 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"baab33a8-22b8-4097-8c91-73d5f005fdf7","Type":"ContainerStarted","Data":"9d2642fba08e82f7fab50725c4908dd7f41042b114c66469823905f1b4131ec5"} Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.292403 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.293922 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"735cd01e-931e-44b0-968d-ebe114278896","Type":"ContainerStarted","Data":"fd4e18f76e0e6f5823923b10ab11728a84798dceeb11a8364e726a5f18115231"} Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.318331 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-5prf9" podStartSLOduration=33.768403642 podStartE2EDuration="36.318297486s" podCreationTimestamp="2025-12-01 07:09:04 +0000 UTC" firstStartedPulling="2025-12-01 07:09:33.109541177 +0000 UTC m=+1128.430348863" lastFinishedPulling="2025-12-01 07:09:35.659434991 +0000 UTC m=+1130.980242707" observedRunningTime="2025-12-01 07:09:40.313027258 +0000 UTC m=+1135.633834954" watchObservedRunningTime="2025-12-01 07:09:40.318297486 +0000 UTC m=+1135.639105182" Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.324785 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.324851 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.347201 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=26.185506691 podStartE2EDuration="32.347170598s" podCreationTimestamp="2025-12-01 07:09:08 +0000 UTC" firstStartedPulling="2025-12-01 07:09:32.716282263 +0000 UTC m=+1128.037089949" lastFinishedPulling="2025-12-01 07:09:38.87794615 +0000 UTC m=+1134.198753856" observedRunningTime="2025-12-01 07:09:40.334861682 +0000 UTC m=+1135.655669368" watchObservedRunningTime="2025-12-01 07:09:40.347170598 +0000 UTC m=+1135.667978304" Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.362642 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=26.991111958 podStartE2EDuration="33.362616292s" podCreationTimestamp="2025-12-01 07:09:07 +0000 UTC" firstStartedPulling="2025-12-01 07:09:32.507428153 +0000 UTC m=+1127.828235829" lastFinishedPulling="2025-12-01 07:09:38.878932477 +0000 UTC m=+1134.199740163" observedRunningTime="2025-12-01 07:09:40.351667214 +0000 UTC m=+1135.672474900" watchObservedRunningTime="2025-12-01 07:09:40.362616292 
+0000 UTC m=+1135.683423988" Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.372960 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=3.537365724 podStartE2EDuration="40.372939592s" podCreationTimestamp="2025-12-01 07:09:00 +0000 UTC" firstStartedPulling="2025-12-01 07:09:02.043662218 +0000 UTC m=+1097.364469904" lastFinishedPulling="2025-12-01 07:09:38.879236086 +0000 UTC m=+1134.200043772" observedRunningTime="2025-12-01 07:09:40.370138674 +0000 UTC m=+1135.690946360" watchObservedRunningTime="2025-12-01 07:09:40.372939592 +0000 UTC m=+1135.693747278" Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.381920 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.851593 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 01 07:09:40 crc kubenswrapper[4822]: I1201 07:09:40.852758 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 01 07:09:41 crc kubenswrapper[4822]: I1201 07:09:41.360667 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 01 07:09:41 crc kubenswrapper[4822]: I1201 07:09:41.603095 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 01 07:09:41 crc kubenswrapper[4822]: I1201 07:09:41.878247 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.313595 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57dc4c6697-rjc4t"] Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.344048 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.370214 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"] Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.377648 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z" Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.381056 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.430121 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"] Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.467860 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-8t992"] Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.469096 4822 util.go:30] "No sandbox for pod can be found. 
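The probe records above show the expected startup sequence: the startup probe reports "unhealthy" until the service binds, then "started", after which the readiness probe flips to "ready". A quick way to confirm the resulting pod conditions for one of these pods, with the pod name taken from the log (assumes kubectl access):

    # Per-condition status (Ready, ContainersReady, ...) as driven by the probes:
    kubectl -n openstack get pod ovsdbserver-nb-0 -o jsonpath='{range .status.conditions[*]}{.type}={.status}{"\n"}{end}'

    # Probe failures are also emitted as events tied to the pod:
    kubectl -n openstack get events --field-selector involvedObject.name=ovsdbserver-nb-0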
Need to start a new one" pod="openstack/ovn-controller-metrics-8t992" Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.472957 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.493602 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-8t992"] Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.534065 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.542408 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.542456 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.542496 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.543281 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9f465aacecd8042506a98ec45b2a943f80875679afbe9996cf6b8ba8198e3f7e"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.543336 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://9f465aacecd8042506a98ec45b2a943f80875679afbe9996cf6b8ba8198e3f7e" gracePeriod=600 Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.966922 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa49318e-e2cd-40c4-910c-3e91feae5e73-config\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992" Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.967502 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-config\") pod \"dnsmasq-dns-b7ccdcb4f-rsh6z\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z" Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.967773 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-ovsdbserver-sb\") pod \"dnsmasq-dns-b7ccdcb4f-rsh6z\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z" Dec 
01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.967816 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/fa49318e-e2cd-40c4-910c-3e91feae5e73-ovn-rundir\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.967839 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa49318e-e2cd-40c4-910c-3e91feae5e73-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.967878 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa49318e-e2cd-40c4-910c-3e91feae5e73-combined-ca-bundle\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.967920 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvhb2\" (UniqueName: \"kubernetes.io/projected/fa49318e-e2cd-40c4-910c-3e91feae5e73-kube-api-access-cvhb2\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.968049 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gdxt\" (UniqueName: \"kubernetes.io/projected/e77584e1-8ef2-420f-a83b-cedbafe33c07-kube-api-access-4gdxt\") pod \"dnsmasq-dns-b7ccdcb4f-rsh6z\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.968171 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/fa49318e-e2cd-40c4-910c-3e91feae5e73-ovs-rundir\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:42 crc kubenswrapper[4822]: I1201 07:09:42.968193 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-dns-svc\") pod \"dnsmasq-dns-b7ccdcb4f-rsh6z\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.083534 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa49318e-e2cd-40c4-910c-3e91feae5e73-config\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.083601 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-config\") pod \"dnsmasq-dns-b7ccdcb4f-rsh6z\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.083660 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-ovsdbserver-sb\") pod \"dnsmasq-dns-b7ccdcb4f-rsh6z\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.083676 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/fa49318e-e2cd-40c4-910c-3e91feae5e73-ovn-rundir\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.083697 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa49318e-e2cd-40c4-910c-3e91feae5e73-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.083711 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa49318e-e2cd-40c4-910c-3e91feae5e73-combined-ca-bundle\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.083728 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvhb2\" (UniqueName: \"kubernetes.io/projected/fa49318e-e2cd-40c4-910c-3e91feae5e73-kube-api-access-cvhb2\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.083760 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gdxt\" (UniqueName: \"kubernetes.io/projected/e77584e1-8ef2-420f-a83b-cedbafe33c07-kube-api-access-4gdxt\") pod \"dnsmasq-dns-b7ccdcb4f-rsh6z\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.083795 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/fa49318e-e2cd-40c4-910c-3e91feae5e73-ovs-rundir\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.083810 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-dns-svc\") pod \"dnsmasq-dns-b7ccdcb4f-rsh6z\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.084683 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa49318e-e2cd-40c4-910c-3e91feae5e73-config\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.112192 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/fa49318e-e2cd-40c4-910c-3e91feae5e73-ovn-rundir\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.112932 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-config\") pod \"dnsmasq-dns-b7ccdcb4f-rsh6z\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.112977 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/fa49318e-e2cd-40c4-910c-3e91feae5e73-ovs-rundir\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.114608 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-dns-svc\") pod \"dnsmasq-dns-b7ccdcb4f-rsh6z\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.142104 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa49318e-e2cd-40c4-910c-3e91feae5e73-combined-ca-bundle\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.144829 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-ovsdbserver-sb\") pod \"dnsmasq-dns-b7ccdcb4f-rsh6z\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.145991 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gdxt\" (UniqueName: \"kubernetes.io/projected/e77584e1-8ef2-420f-a83b-cedbafe33c07-kube-api-access-4gdxt\") pod \"dnsmasq-dns-b7ccdcb4f-rsh6z\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.149372 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvhb2\" (UniqueName: \"kubernetes.io/projected/fa49318e-e2cd-40c4-910c-3e91feae5e73-kube-api-access-cvhb2\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.150061 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa49318e-e2cd-40c4-910c-3e91feae5e73-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-8t992\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.156221 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-rlq5d"]
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.187606 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bd7c66845-24c59"]
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.189071 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.190766 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.195633 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bd7c66845-24c59"]
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.287384 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-ovsdbserver-sb\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.287444 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-dns-svc\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.287495 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-ovsdbserver-nb\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.287535 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr9cr\" (UniqueName: \"kubernetes.io/projected/65412c24-2f8a-4821-b4a6-128a47ea8295-kube-api-access-lr9cr\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.287605 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-config\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.324048 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.347339 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.349724 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.352875 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.353173 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-mhcm5"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.353302 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.353306 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.362584 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="9f465aacecd8042506a98ec45b2a943f80875679afbe9996cf6b8ba8198e3f7e" exitCode=0
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.363884 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"9f465aacecd8042506a98ec45b2a943f80875679afbe9996cf6b8ba8198e3f7e"}
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.363955 4822 scope.go:117] "RemoveContainer" containerID="6baf3d5e41a621d0e59cbb384ffe06f0de93d5916d8dc51ecf89a3a235ed2c54"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.364329 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.389624 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-ovsdbserver-sb\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.389669 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-dns-svc\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.389716 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-ovsdbserver-nb\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.389762 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr9cr\" (UniqueName: \"kubernetes.io/projected/65412c24-2f8a-4821-b4a6-128a47ea8295-kube-api-access-lr9cr\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.389870 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-config\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.390866 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-config\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.391484 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-dns-svc\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.391629 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-ovsdbserver-nb\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.391662 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-ovsdbserver-sb\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.409001 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr9cr\" (UniqueName: \"kubernetes.io/projected/65412c24-2f8a-4821-b4a6-128a47ea8295-kube-api-access-lr9cr\") pod \"dnsmasq-dns-5bd7c66845-24c59\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.428275 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-8t992"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.502376 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtkc2\" (UniqueName: \"kubernetes.io/projected/fd2326e5-f8a2-47ca-8519-576caa1825c5-kube-api-access-dtkc2\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.502412 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd2326e5-f8a2-47ca-8519-576caa1825c5-scripts\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.502447 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.502497 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.502642 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd2326e5-f8a2-47ca-8519-576caa1825c5-config\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.502669 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fd2326e5-f8a2-47ca-8519-576caa1825c5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.502730 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.529506 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.610845 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtkc2\" (UniqueName: \"kubernetes.io/projected/fd2326e5-f8a2-47ca-8519-576caa1825c5-kube-api-access-dtkc2\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.611153 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd2326e5-f8a2-47ca-8519-576caa1825c5-scripts\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.611178 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.611198 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.611238 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd2326e5-f8a2-47ca-8519-576caa1825c5-config\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.611255 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fd2326e5-f8a2-47ca-8519-576caa1825c5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.611277 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.616077 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.617406 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd2326e5-f8a2-47ca-8519-576caa1825c5-scripts\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.668724 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fd2326e5-f8a2-47ca-8519-576caa1825c5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.669279 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd2326e5-f8a2-47ca-8519-576caa1825c5-config\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.671400 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.671990 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.673623 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtkc2\" (UniqueName: \"kubernetes.io/projected/fd2326e5-f8a2-47ca-8519-576caa1825c5-kube-api-access-dtkc2\") pod \"ovn-northd-0\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.676740 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.727476 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.818646 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf39297e-a183-46ed-9b65-963a6dbad591-config\") pod \"bf39297e-a183-46ed-9b65-963a6dbad591\" (UID: \"bf39297e-a183-46ed-9b65-963a6dbad591\") "
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.818788 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-825xf\" (UniqueName: \"kubernetes.io/projected/bf39297e-a183-46ed-9b65-963a6dbad591-kube-api-access-825xf\") pod \"bf39297e-a183-46ed-9b65-963a6dbad591\" (UID: \"bf39297e-a183-46ed-9b65-963a6dbad591\") "
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.818855 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf39297e-a183-46ed-9b65-963a6dbad591-dns-svc\") pod \"bf39297e-a183-46ed-9b65-963a6dbad591\" (UID: \"bf39297e-a183-46ed-9b65-963a6dbad591\") "
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.820521 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf39297e-a183-46ed-9b65-963a6dbad591-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bf39297e-a183-46ed-9b65-963a6dbad591" (UID: "bf39297e-a183-46ed-9b65-963a6dbad591"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.821111 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf39297e-a183-46ed-9b65-963a6dbad591-config" (OuterVolumeSpecName: "config") pod "bf39297e-a183-46ed-9b65-963a6dbad591" (UID: "bf39297e-a183-46ed-9b65-963a6dbad591"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.840364 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf39297e-a183-46ed-9b65-963a6dbad591-kube-api-access-825xf" (OuterVolumeSpecName: "kube-api-access-825xf") pod "bf39297e-a183-46ed-9b65-963a6dbad591" (UID: "bf39297e-a183-46ed-9b65-963a6dbad591"). InnerVolumeSpecName "kube-api-access-825xf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.923232 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf39297e-a183-46ed-9b65-963a6dbad591-config\") on node \"crc\" DevicePath \"\""
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.923263 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-825xf\" (UniqueName: \"kubernetes.io/projected/bf39297e-a183-46ed-9b65-963a6dbad591-kube-api-access-825xf\") on node \"crc\" DevicePath \"\""
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.923275 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf39297e-a183-46ed-9b65-963a6dbad591-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 01 07:09:43 crc kubenswrapper[4822]: I1201 07:09:43.978743 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.026482 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f202041-53e6-477f-89fb-943b93fae588-dns-svc\") pod \"3f202041-53e6-477f-89fb-943b93fae588\" (UID: \"3f202041-53e6-477f-89fb-943b93fae588\") "
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.027130 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f202041-53e6-477f-89fb-943b93fae588-config\") pod \"3f202041-53e6-477f-89fb-943b93fae588\" (UID: \"3f202041-53e6-477f-89fb-943b93fae588\") "
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.027224 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qb7tq\" (UniqueName: \"kubernetes.io/projected/3f202041-53e6-477f-89fb-943b93fae588-kube-api-access-qb7tq\") pod \"3f202041-53e6-477f-89fb-943b93fae588\" (UID: \"3f202041-53e6-477f-89fb-943b93fae588\") "
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.027245 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f202041-53e6-477f-89fb-943b93fae588-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3f202041-53e6-477f-89fb-943b93fae588" (UID: "3f202041-53e6-477f-89fb-943b93fae588"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.027863 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f202041-53e6-477f-89fb-943b93fae588-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.092882 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f202041-53e6-477f-89fb-943b93fae588-config" (OuterVolumeSpecName: "config") pod "3f202041-53e6-477f-89fb-943b93fae588" (UID: "3f202041-53e6-477f-89fb-943b93fae588"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.117404 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f202041-53e6-477f-89fb-943b93fae588-kube-api-access-qb7tq" (OuterVolumeSpecName: "kube-api-access-qb7tq") pod "3f202041-53e6-477f-89fb-943b93fae588" (UID: "3f202041-53e6-477f-89fb-943b93fae588"). InnerVolumeSpecName "kube-api-access-qb7tq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.131966 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f202041-53e6-477f-89fb-943b93fae588-config\") on node \"crc\" DevicePath \"\""
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.131998 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qb7tq\" (UniqueName: \"kubernetes.io/projected/3f202041-53e6-477f-89fb-943b93fae588-kube-api-access-qb7tq\") on node \"crc\" DevicePath \"\""
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.370216 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d" event={"ID":"3f202041-53e6-477f-89fb-943b93fae588","Type":"ContainerDied","Data":"e4b9d8aa40f112db3026f0546dae9d126885bbb51556dafae6a39e7a9246c7da"}
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.370562 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-rlq5d"
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.376864 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t" event={"ID":"bf39297e-a183-46ed-9b65-963a6dbad591","Type":"ContainerDied","Data":"c868e1fc072db2301b7277de1b8089a7c8836df4815e7cd7f5ad3fc25df6d7c4"}
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.376905 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57dc4c6697-rjc4t"
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.453419 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-rlq5d"]
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.474191 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-rlq5d"]
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.488443 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57dc4c6697-rjc4t"]
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.494043 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57dc4c6697-rjc4t"]
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.595123 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bd7c66845-24c59"]
Dec 01 07:09:44 crc kubenswrapper[4822]: W1201 07:09:44.598854 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65412c24_2f8a_4821_b4a6_128a47ea8295.slice/crio-f3a4c47f90e0c305a691c0964de7fb1ac52a802a009eb734ddcd270a60efd023 WatchSource:0}: Error finding container f3a4c47f90e0c305a691c0964de7fb1ac52a802a009eb734ddcd270a60efd023: Status 404 returned error can't find the container with id f3a4c47f90e0c305a691c0964de7fb1ac52a802a009eb734ddcd270a60efd023
Dec 01 07:09:44 crc kubenswrapper[4822]: W1201 07:09:44.600883 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd2326e5_f8a2_47ca_8519_576caa1825c5.slice/crio-eaddab33352b9ab1ec42dbd68af1d6e936784e8dd4b6d4ad6e2bd967e94a5fd3 WatchSource:0}: Error finding container eaddab33352b9ab1ec42dbd68af1d6e936784e8dd4b6d4ad6e2bd967e94a5fd3: Status 404 returned error can't find the container with id eaddab33352b9ab1ec42dbd68af1d6e936784e8dd4b6d4ad6e2bd967e94a5fd3
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.601722 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.678483 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-8t992"]
Dec 01 07:09:44 crc kubenswrapper[4822]: W1201 07:09:44.678871 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa49318e_e2cd_40c4_910c_3e91feae5e73.slice/crio-a42b6c757cf819d4fbef0bd7a4a60def699ba518c97e8564d8133d6a20609e7c WatchSource:0}: Error finding container a42b6c757cf819d4fbef0bd7a4a60def699ba518c97e8564d8133d6a20609e7c: Status 404 returned error can't find the container with id a42b6c757cf819d4fbef0bd7a4a60def699ba518c97e8564d8133d6a20609e7c
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.706280 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"]
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.967024 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f202041-53e6-477f-89fb-943b93fae588" path="/var/lib/kubelet/pods/3f202041-53e6-477f-89fb-943b93fae588/volumes"
Dec 01 07:09:44 crc kubenswrapper[4822]: I1201 07:09:44.968828 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf39297e-a183-46ed-9b65-963a6dbad591" path="/var/lib/kubelet/pods/bf39297e-a183-46ed-9b65-963a6dbad591/volumes"
Dec 01 07:09:45 crc kubenswrapper[4822]: I1201 07:09:45.388611 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bd7c66845-24c59" event={"ID":"65412c24-2f8a-4821-b4a6-128a47ea8295","Type":"ContainerStarted","Data":"f3a4c47f90e0c305a691c0964de7fb1ac52a802a009eb734ddcd270a60efd023"}
Dec 01 07:09:45 crc kubenswrapper[4822]: I1201 07:09:45.390045 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z" event={"ID":"e77584e1-8ef2-420f-a83b-cedbafe33c07","Type":"ContainerStarted","Data":"187219aeb39040bd051b3d178b02ae9cf7b1250f1e3bd912fe609abd6cfd55cb"}
Dec 01 07:09:45 crc kubenswrapper[4822]: I1201 07:09:45.391497 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fd2326e5-f8a2-47ca-8519-576caa1825c5","Type":"ContainerStarted","Data":"eaddab33352b9ab1ec42dbd68af1d6e936784e8dd4b6d4ad6e2bd967e94a5fd3"}
Dec 01 07:09:45 crc kubenswrapper[4822]: I1201 07:09:45.393086 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-8t992" event={"ID":"fa49318e-e2cd-40c4-910c-3e91feae5e73","Type":"ContainerStarted","Data":"a42b6c757cf819d4fbef0bd7a4a60def699ba518c97e8564d8133d6a20609e7c"}
Dec 01 07:09:46 crc kubenswrapper[4822]: I1201 07:09:46.254769 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Dec 01 07:09:49 crc kubenswrapper[4822]: I1201 07:09:49.419274 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:49 crc kubenswrapper[4822]: I1201 07:09:49.498542 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-8t992" event={"ID":"fa49318e-e2cd-40c4-910c-3e91feae5e73","Type":"ContainerStarted","Data":"97d660e529a0aaa28a2d2a3858d4da0114b11b557d1f19a98f4c4659e698d302"}
Dec 01 07:09:49 crc kubenswrapper[4822]: I1201 07:09:49.501777 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"cb68b029768127b77693597ceb7a762b8c61f40a1a25b9306ea20cdd9dcb63b5"}
Dec 01 07:09:49 crc kubenswrapper[4822]: I1201 07:09:49.520790 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-8t992" podStartSLOduration=7.520774017 podStartE2EDuration="7.520774017s" podCreationTimestamp="2025-12-01 07:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:09:49.518962707 +0000 UTC m=+1144.839770383" watchObservedRunningTime="2025-12-01 07:09:49.520774017 +0000 UTC m=+1144.841581703"
Dec 01 07:09:49 crc kubenswrapper[4822]: I1201 07:09:49.572115 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Dec 01 07:09:50 crc kubenswrapper[4822]: I1201 07:09:50.509155 4822 generic.go:334] "Generic (PLEG): container finished" podID="65412c24-2f8a-4821-b4a6-128a47ea8295" containerID="acaf968859322d847b4e3827e3faa6402584e2a3efd6f97b83eda7a1777aee5a" exitCode=0
Dec 01 07:09:50 crc kubenswrapper[4822]: I1201 07:09:50.509325 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bd7c66845-24c59" event={"ID":"65412c24-2f8a-4821-b4a6-128a47ea8295","Type":"ContainerDied","Data":"acaf968859322d847b4e3827e3faa6402584e2a3efd6f97b83eda7a1777aee5a"}
Dec 01 07:09:50 crc kubenswrapper[4822]: I1201 07:09:50.511964 4822 generic.go:334] "Generic (PLEG): container finished" podID="e77584e1-8ef2-420f-a83b-cedbafe33c07" containerID="fa0590248c295a4eb269cabc413aace4bcad6d51d924d45f9d2b57fdc9a7a3a8" exitCode=0
Dec 01 07:09:50 crc kubenswrapper[4822]: I1201 07:09:50.512026 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z" event={"ID":"e77584e1-8ef2-420f-a83b-cedbafe33c07","Type":"ContainerDied","Data":"fa0590248c295a4eb269cabc413aace4bcad6d51d924d45f9d2b57fdc9a7a3a8"}
Dec 01 07:09:50 crc kubenswrapper[4822]: I1201 07:09:50.514268 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fd2326e5-f8a2-47ca-8519-576caa1825c5","Type":"ContainerStarted","Data":"d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5"}
Dec 01 07:09:50 crc kubenswrapper[4822]: I1201 07:09:50.514326 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fd2326e5-f8a2-47ca-8519-576caa1825c5","Type":"ContainerStarted","Data":"131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038"}
Dec 01 07:09:50 crc kubenswrapper[4822]: I1201 07:09:50.514568 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Dec 01 07:09:50 crc kubenswrapper[4822]: I1201 07:09:50.517251 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"47ea8f0f-c6c9-412a-b158-878fe3107ed5","Type":"ContainerStarted","Data":"33e9da4f4f59b718ecc9296f695fb4aadf4f7deb003919c6c2aeb2a3222f2d2e"}
Dec 01 07:09:50 crc kubenswrapper[4822]: I1201 07:09:50.517700 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Dec 01 07:09:50 crc kubenswrapper[4822]: I1201 07:09:50.559821 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.835189806 podStartE2EDuration="48.559802803s" podCreationTimestamp="2025-12-01 07:09:02 +0000 UTC" firstStartedPulling="2025-12-01 07:09:03.740175027 +0000 UTC m=+1099.060982713" lastFinishedPulling="2025-12-01 07:09:49.464788014 +0000 UTC m=+1144.785595710" observedRunningTime="2025-12-01 07:09:50.558854036 +0000 UTC m=+1145.879661722" watchObservedRunningTime="2025-12-01 07:09:50.559802803 +0000 UTC m=+1145.880610489"
Dec 01 07:09:50 crc kubenswrapper[4822]: I1201 07:09:50.577541 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.20585602 podStartE2EDuration="7.577520731s" podCreationTimestamp="2025-12-01 07:09:43 +0000 UTC" firstStartedPulling="2025-12-01 07:09:44.608751506 +0000 UTC m=+1139.929559202" lastFinishedPulling="2025-12-01 07:09:49.980416227 +0000 UTC m=+1145.301223913" observedRunningTime="2025-12-01 07:09:50.575045191 +0000 UTC m=+1145.895852877" watchObservedRunningTime="2025-12-01 07:09:50.577520731 +0000 UTC m=+1145.898328417"
Dec 01 07:09:51 crc kubenswrapper[4822]: I1201 07:09:51.529754 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bd7c66845-24c59" event={"ID":"65412c24-2f8a-4821-b4a6-128a47ea8295","Type":"ContainerStarted","Data":"7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c"}
Dec 01 07:09:51 crc kubenswrapper[4822]: I1201 07:09:51.530287 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bd7c66845-24c59"
Dec 01 07:09:51 crc kubenswrapper[4822]: I1201 07:09:51.532612 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z" event={"ID":"e77584e1-8ef2-420f-a83b-cedbafe33c07","Type":"ContainerStarted","Data":"2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd"}
Dec 01 07:09:51 crc kubenswrapper[4822]: I1201 07:09:51.560345 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bd7c66845-24c59" podStartSLOduration=3.99960657 podStartE2EDuration="8.560321576s" podCreationTimestamp="2025-12-01 07:09:43 +0000 UTC" firstStartedPulling="2025-12-01 07:09:44.601726429 +0000 UTC m=+1139.922534105" lastFinishedPulling="2025-12-01 07:09:49.162441385 +0000 UTC m=+1144.483249111" observedRunningTime="2025-12-01 07:09:51.556753356 +0000 UTC m=+1146.877561052" watchObservedRunningTime="2025-12-01 07:09:51.560321576 +0000 UTC m=+1146.881129302"
Dec 01 07:09:51 crc kubenswrapper[4822]: I1201 07:09:51.580000 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z" podStartSLOduration=5.06969429 podStartE2EDuration="9.579976259s" podCreationTimestamp="2025-12-01 07:09:42 +0000 UTC" firstStartedPulling="2025-12-01 07:09:44.806011941 +0000 UTC m=+1140.126819627" lastFinishedPulling="2025-12-01 07:09:49.31629391 +0000 UTC m=+1144.637101596" observedRunningTime="2025-12-01 07:09:51.57398858 +0000 UTC m=+1146.894796306" watchObservedRunningTime="2025-12-01 07:09:51.579976259 +0000 UTC m=+1146.900783985"
Dec 01 07:09:52 crc kubenswrapper[4822]: I1201 07:09:52.550435 4822 generic.go:334] "Generic (PLEG): container finished" podID="ad18272e-45a9-40cd-8b46-2de8cb3a31be" containerID="47b85be0256b3129ddbeda68dea127dd2fa6e5d1670aef3677449c16013f88ce" exitCode=0
Dec 01 07:09:52 crc kubenswrapper[4822]: I1201 07:09:52.550635 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ad18272e-45a9-40cd-8b46-2de8cb3a31be","Type":"ContainerDied","Data":"47b85be0256b3129ddbeda68dea127dd2fa6e5d1670aef3677449c16013f88ce"}
Dec 01 07:09:52 crc kubenswrapper[4822]: I1201 07:09:52.552262 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.430908 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"]
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.506227 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f6d79597f-nhz8d"]
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.507558 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.533149 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f6d79597f-nhz8d"]
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.546232 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-config\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.546316 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9zpd\" (UniqueName: \"kubernetes.io/projected/de738eee-1f47-42ae-be49-a65d2fe8ea3e-kube-api-access-x9zpd\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.546352 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-ovsdbserver-nb\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.546407 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-dns-svc\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.546435 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-ovsdbserver-sb\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.564741 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ad18272e-45a9-40cd-8b46-2de8cb3a31be","Type":"ContainerStarted","Data":"bc0412ee3ba6291d6a77685a5f298599e54eb460cf9c8ccea6109363b76fe453"}
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.593648 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371981.261143 podStartE2EDuration="55.59363291s" podCreationTimestamp="2025-12-01 07:08:58 +0000 UTC" firstStartedPulling="2025-12-01 07:09:00.934912216 +0000 UTC m=+1096.255719902" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:09:53.593024053 +0000 UTC m=+1148.913831739" watchObservedRunningTime="2025-12-01 07:09:53.59363291 +0000 UTC m=+1148.914440596"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.648081 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9zpd\" (UniqueName: \"kubernetes.io/projected/de738eee-1f47-42ae-be49-a65d2fe8ea3e-kube-api-access-x9zpd\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.648148 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-ovsdbserver-nb\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.648219 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-dns-svc\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.649102 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-ovsdbserver-nb\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.649286 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-dns-svc\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.649362 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-ovsdbserver-sb\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.650131 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-ovsdbserver-sb\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.651069 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-config\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.651772 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-config\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.670636 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9zpd\" (UniqueName: \"kubernetes.io/projected/de738eee-1f47-42ae-be49-a65d2fe8ea3e-kube-api-access-x9zpd\") pod \"dnsmasq-dns-5f6d79597f-nhz8d\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:53 crc kubenswrapper[4822]: I1201 07:09:53.826038 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.354842 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f6d79597f-nhz8d"]
Dec 01 07:09:54 crc kubenswrapper[4822]: W1201 07:09:54.363174 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde738eee_1f47_42ae_be49_a65d2fe8ea3e.slice/crio-90f49e6fd14e9fa620ef061317805d458724e6f50c007be364fe1ff073490986 WatchSource:0}: Error finding container 90f49e6fd14e9fa620ef061317805d458724e6f50c007be364fe1ff073490986: Status 404 returned error can't find the container with id 90f49e6fd14e9fa620ef061317805d458724e6f50c007be364fe1ff073490986
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.496080 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.501267 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.505166 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.505315 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.505360 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.505697 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-wvns9"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.527635 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.565723 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.565812 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcd8h\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-kube-api-access-tcd8h\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.565833 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b21643f1-db8c-4613-ac29-f1d4d0970b7b-lock\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.565854 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.565929 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b21643f1-db8c-4613-ac29-f1d4d0970b7b-cache\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.571877 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" event={"ID":"de738eee-1f47-42ae-be49-a65d2fe8ea3e","Type":"ContainerStarted","Data":"90f49e6fd14e9fa620ef061317805d458724e6f50c007be364fe1ff073490986"}
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.572026 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z" podUID="e77584e1-8ef2-420f-a83b-cedbafe33c07" containerName="dnsmasq-dns" containerID="cri-o://2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd" gracePeriod=10
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.667804 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b21643f1-db8c-4613-ac29-f1d4d0970b7b-cache\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.667900 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.667965 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcd8h\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-kube-api-access-tcd8h\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.667993 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b21643f1-db8c-4613-ac29-f1d4d0970b7b-lock\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.668018 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: E1201 07:09:54.668102 4822 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 01 07:09:54 crc kubenswrapper[4822]: E1201 07:09:54.668136 4822 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 01 07:09:54 crc kubenswrapper[4822]: E1201 07:09:54.668188 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift podName:b21643f1-db8c-4613-ac29-f1d4d0970b7b nodeName:}" failed. No retries permitted until 2025-12-01 07:09:55.168169814 +0000 UTC m=+1150.488977500 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift") pod "swift-storage-0" (UID: "b21643f1-db8c-4613-ac29-f1d4d0970b7b") : configmap "swift-ring-files" not found
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.668384 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.668507 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b21643f1-db8c-4613-ac29-f1d4d0970b7b-cache\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.668592 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b21643f1-db8c-4613-ac29-f1d4d0970b7b-lock\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.690480 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcd8h\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-kube-api-access-tcd8h\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:54 crc kubenswrapper[4822]: I1201 07:09:54.693603 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.011276 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.073843 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-config\") pod \"e77584e1-8ef2-420f-a83b-cedbafe33c07\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") "
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.073998 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-ovsdbserver-sb\") pod \"e77584e1-8ef2-420f-a83b-cedbafe33c07\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") "
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.074033 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-dns-svc\") pod \"e77584e1-8ef2-420f-a83b-cedbafe33c07\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") "
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.074062 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gdxt\" (UniqueName: \"kubernetes.io/projected/e77584e1-8ef2-420f-a83b-cedbafe33c07-kube-api-access-4gdxt\") pod \"e77584e1-8ef2-420f-a83b-cedbafe33c07\" (UID: \"e77584e1-8ef2-420f-a83b-cedbafe33c07\") "
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.085787 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e77584e1-8ef2-420f-a83b-cedbafe33c07-kube-api-access-4gdxt" (OuterVolumeSpecName: "kube-api-access-4gdxt") pod "e77584e1-8ef2-420f-a83b-cedbafe33c07" (UID: "e77584e1-8ef2-420f-a83b-cedbafe33c07"). InnerVolumeSpecName "kube-api-access-4gdxt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.122132 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-config" (OuterVolumeSpecName: "config") pod "e77584e1-8ef2-420f-a83b-cedbafe33c07" (UID: "e77584e1-8ef2-420f-a83b-cedbafe33c07"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.143977 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e77584e1-8ef2-420f-a83b-cedbafe33c07" (UID: "e77584e1-8ef2-420f-a83b-cedbafe33c07"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.172370 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e77584e1-8ef2-420f-a83b-cedbafe33c07" (UID: "e77584e1-8ef2-420f-a83b-cedbafe33c07"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.176023 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0"
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.176099 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-config\") on node \"crc\" DevicePath \"\""
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.176110 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.176122 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e77584e1-8ef2-420f-a83b-cedbafe33c07-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.176131 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gdxt\" (UniqueName: \"kubernetes.io/projected/e77584e1-8ef2-420f-a83b-cedbafe33c07-kube-api-access-4gdxt\") on node \"crc\" DevicePath \"\""
Dec 01 07:09:55 crc kubenswrapper[4822]: E1201 07:09:55.176246 4822 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 01 07:09:55 crc kubenswrapper[4822]: E1201 07:09:55.176278 4822 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 01 07:09:55 crc kubenswrapper[4822]: E1201 07:09:55.176348 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift podName:b21643f1-db8c-4613-ac29-f1d4d0970b7b nodeName:}" failed. No retries permitted until 2025-12-01 07:09:56.176325298 +0000 UTC m=+1151.497132994 (durationBeforeRetry 1s).
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift") pod "swift-storage-0" (UID: "b21643f1-db8c-4613-ac29-f1d4d0970b7b") : configmap "swift-ring-files" not found Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.592912 4822 generic.go:334] "Generic (PLEG): container finished" podID="e77584e1-8ef2-420f-a83b-cedbafe33c07" containerID="2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd" exitCode=0 Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.592972 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z" event={"ID":"e77584e1-8ef2-420f-a83b-cedbafe33c07","Type":"ContainerDied","Data":"2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd"} Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.593002 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z" event={"ID":"e77584e1-8ef2-420f-a83b-cedbafe33c07","Type":"ContainerDied","Data":"187219aeb39040bd051b3d178b02ae9cf7b1250f1e3bd912fe609abd6cfd55cb"} Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.593023 4822 scope.go:117] "RemoveContainer" containerID="2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd" Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.593151 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b7ccdcb4f-rsh6z" Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.598176 4822 generic.go:334] "Generic (PLEG): container finished" podID="de738eee-1f47-42ae-be49-a65d2fe8ea3e" containerID="87b9f05058a99e738bfa21d7abf037109225480fd440e2e419d20c8faa67000a" exitCode=0 Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.598220 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" event={"ID":"de738eee-1f47-42ae-be49-a65d2fe8ea3e","Type":"ContainerDied","Data":"87b9f05058a99e738bfa21d7abf037109225480fd440e2e419d20c8faa67000a"} Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.628824 4822 scope.go:117] "RemoveContainer" containerID="fa0590248c295a4eb269cabc413aace4bcad6d51d924d45f9d2b57fdc9a7a3a8" Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.663312 4822 scope.go:117] "RemoveContainer" containerID="2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd" Dec 01 07:09:55 crc kubenswrapper[4822]: E1201 07:09:55.664837 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd\": container with ID starting with 2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd not found: ID does not exist" containerID="2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd" Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.664894 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd"} err="failed to get container status \"2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd\": rpc error: code = NotFound desc = could not find container \"2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd\": container with ID starting with 2891045efe16b53e2ffb072c0c040dbba7b34800cd676d18a38fd4a7332af0dd not found: ID does not exist" Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.664941 4822 
scope.go:117] "RemoveContainer" containerID="fa0590248c295a4eb269cabc413aace4bcad6d51d924d45f9d2b57fdc9a7a3a8" Dec 01 07:09:55 crc kubenswrapper[4822]: E1201 07:09:55.667232 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa0590248c295a4eb269cabc413aace4bcad6d51d924d45f9d2b57fdc9a7a3a8\": container with ID starting with fa0590248c295a4eb269cabc413aace4bcad6d51d924d45f9d2b57fdc9a7a3a8 not found: ID does not exist" containerID="fa0590248c295a4eb269cabc413aace4bcad6d51d924d45f9d2b57fdc9a7a3a8" Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.667259 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa0590248c295a4eb269cabc413aace4bcad6d51d924d45f9d2b57fdc9a7a3a8"} err="failed to get container status \"fa0590248c295a4eb269cabc413aace4bcad6d51d924d45f9d2b57fdc9a7a3a8\": rpc error: code = NotFound desc = could not find container \"fa0590248c295a4eb269cabc413aace4bcad6d51d924d45f9d2b57fdc9a7a3a8\": container with ID starting with fa0590248c295a4eb269cabc413aace4bcad6d51d924d45f9d2b57fdc9a7a3a8 not found: ID does not exist" Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.682328 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"] Dec 01 07:09:55 crc kubenswrapper[4822]: I1201 07:09:55.692987 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b7ccdcb4f-rsh6z"] Dec 01 07:09:56 crc kubenswrapper[4822]: I1201 07:09:56.200842 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0" Dec 01 07:09:56 crc kubenswrapper[4822]: E1201 07:09:56.201114 4822 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 07:09:56 crc kubenswrapper[4822]: E1201 07:09:56.201598 4822 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 07:09:56 crc kubenswrapper[4822]: E1201 07:09:56.201691 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift podName:b21643f1-db8c-4613-ac29-f1d4d0970b7b nodeName:}" failed. No retries permitted until 2025-12-01 07:09:58.201665248 +0000 UTC m=+1153.522472944 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift") pod "swift-storage-0" (UID: "b21643f1-db8c-4613-ac29-f1d4d0970b7b") : configmap "swift-ring-files" not found Dec 01 07:09:56 crc kubenswrapper[4822]: I1201 07:09:56.608643 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" event={"ID":"de738eee-1f47-42ae-be49-a65d2fe8ea3e","Type":"ContainerStarted","Data":"75bccd05fdf56db9e946daf05d599a452eb287174556653e7d9bb6d54491f25b"} Dec 01 07:09:56 crc kubenswrapper[4822]: I1201 07:09:56.608718 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" Dec 01 07:09:56 crc kubenswrapper[4822]: I1201 07:09:56.635032 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" podStartSLOduration=3.635006089 podStartE2EDuration="3.635006089s" podCreationTimestamp="2025-12-01 07:09:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:09:56.623956458 +0000 UTC m=+1151.944764144" watchObservedRunningTime="2025-12-01 07:09:56.635006089 +0000 UTC m=+1151.955813805" Dec 01 07:09:56 crc kubenswrapper[4822]: I1201 07:09:56.967595 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e77584e1-8ef2-420f-a83b-cedbafe33c07" path="/var/lib/kubelet/pods/e77584e1-8ef2-420f-a83b-cedbafe33c07/volumes" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.239160 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0" Dec 01 07:09:58 crc kubenswrapper[4822]: E1201 07:09:58.239439 4822 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 07:09:58 crc kubenswrapper[4822]: E1201 07:09:58.239806 4822 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 07:09:58 crc kubenswrapper[4822]: E1201 07:09:58.239883 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift podName:b21643f1-db8c-4613-ac29-f1d4d0970b7b nodeName:}" failed. No retries permitted until 2025-12-01 07:10:02.239857999 +0000 UTC m=+1157.560665725 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift") pod "swift-storage-0" (UID: "b21643f1-db8c-4613-ac29-f1d4d0970b7b") : configmap "swift-ring-files" not found Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.372424 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-mvk28"] Dec 01 07:09:58 crc kubenswrapper[4822]: E1201 07:09:58.373157 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e77584e1-8ef2-420f-a83b-cedbafe33c07" containerName="init" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.373247 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e77584e1-8ef2-420f-a83b-cedbafe33c07" containerName="init" Dec 01 07:09:58 crc kubenswrapper[4822]: E1201 07:09:58.373358 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e77584e1-8ef2-420f-a83b-cedbafe33c07" containerName="dnsmasq-dns" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.373427 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e77584e1-8ef2-420f-a83b-cedbafe33c07" containerName="dnsmasq-dns" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.373713 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="e77584e1-8ef2-420f-a83b-cedbafe33c07" containerName="dnsmasq-dns" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.374537 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.377226 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.377252 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.377517 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.393559 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-mvk28"] Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.443223 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdhks\" (UniqueName: \"kubernetes.io/projected/a5762ef7-aac0-426f-a391-d50a06868dee-kube-api-access-rdhks\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.443296 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a5762ef7-aac0-426f-a391-d50a06868dee-etc-swift\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.443320 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-combined-ca-bundle\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.443338 4822 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a5762ef7-aac0-426f-a391-d50a06868dee-ring-data-devices\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.443366 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-swiftconf\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.443414 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5762ef7-aac0-426f-a391-d50a06868dee-scripts\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.443460 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-dispersionconf\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.532776 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bd7c66845-24c59" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.545293 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-dispersionconf\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.545362 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdhks\" (UniqueName: \"kubernetes.io/projected/a5762ef7-aac0-426f-a391-d50a06868dee-kube-api-access-rdhks\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.545419 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a5762ef7-aac0-426f-a391-d50a06868dee-etc-swift\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.545436 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-combined-ca-bundle\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.545451 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a5762ef7-aac0-426f-a391-d50a06868dee-ring-data-devices\") pod \"swift-ring-rebalance-mvk28\" (UID: 
\"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.545486 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-swiftconf\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.545540 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5762ef7-aac0-426f-a391-d50a06868dee-scripts\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.546243 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5762ef7-aac0-426f-a391-d50a06868dee-scripts\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.546905 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a5762ef7-aac0-426f-a391-d50a06868dee-ring-data-devices\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.547375 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a5762ef7-aac0-426f-a391-d50a06868dee-etc-swift\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.552944 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-swiftconf\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.567264 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-dispersionconf\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.568386 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-combined-ca-bundle\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.574260 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdhks\" (UniqueName: \"kubernetes.io/projected/a5762ef7-aac0-426f-a391-d50a06868dee-kube-api-access-rdhks\") pod \"swift-ring-rebalance-mvk28\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:58 crc kubenswrapper[4822]: I1201 07:09:58.693391 4822 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:09:59 crc kubenswrapper[4822]: I1201 07:09:59.391523 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-mvk28"] Dec 01 07:09:59 crc kubenswrapper[4822]: I1201 07:09:59.634663 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mvk28" event={"ID":"a5762ef7-aac0-426f-a391-d50a06868dee","Type":"ContainerStarted","Data":"1da6c85d219156a05b3822d9346dff966fd0fde319d53be9c7faf4be10ac622e"} Dec 01 07:09:59 crc kubenswrapper[4822]: I1201 07:09:59.807745 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 01 07:09:59 crc kubenswrapper[4822]: I1201 07:09:59.807810 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 01 07:09:59 crc kubenswrapper[4822]: I1201 07:09:59.925162 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 01 07:10:00 crc kubenswrapper[4822]: I1201 07:10:00.745683 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.246812 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-75d6-account-create-update-qqkzs"] Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.250313 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-75d6-account-create-update-qqkzs" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.253463 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.264192 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-75d6-account-create-update-qqkzs"] Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.331069 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14899afe-5dfb-4985-bff9-f5d1611efb1d-operator-scripts\") pod \"placement-75d6-account-create-update-qqkzs\" (UID: \"14899afe-5dfb-4985-bff9-f5d1611efb1d\") " pod="openstack/placement-75d6-account-create-update-qqkzs" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.331137 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59vmt\" (UniqueName: \"kubernetes.io/projected/14899afe-5dfb-4985-bff9-f5d1611efb1d-kube-api-access-59vmt\") pod \"placement-75d6-account-create-update-qqkzs\" (UID: \"14899afe-5dfb-4985-bff9-f5d1611efb1d\") " pod="openstack/placement-75d6-account-create-update-qqkzs" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.336276 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-fv5zp"] Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.337424 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-fv5zp" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.351877 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-fv5zp"] Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.433082 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cd4e4bde-d294-4379-b394-44d4b43371bc-operator-scripts\") pod \"placement-db-create-fv5zp\" (UID: \"cd4e4bde-d294-4379-b394-44d4b43371bc\") " pod="openstack/placement-db-create-fv5zp" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.433236 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cths8\" (UniqueName: \"kubernetes.io/projected/cd4e4bde-d294-4379-b394-44d4b43371bc-kube-api-access-cths8\") pod \"placement-db-create-fv5zp\" (UID: \"cd4e4bde-d294-4379-b394-44d4b43371bc\") " pod="openstack/placement-db-create-fv5zp" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.433296 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14899afe-5dfb-4985-bff9-f5d1611efb1d-operator-scripts\") pod \"placement-75d6-account-create-update-qqkzs\" (UID: \"14899afe-5dfb-4985-bff9-f5d1611efb1d\") " pod="openstack/placement-75d6-account-create-update-qqkzs" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.433324 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59vmt\" (UniqueName: \"kubernetes.io/projected/14899afe-5dfb-4985-bff9-f5d1611efb1d-kube-api-access-59vmt\") pod \"placement-75d6-account-create-update-qqkzs\" (UID: \"14899afe-5dfb-4985-bff9-f5d1611efb1d\") " pod="openstack/placement-75d6-account-create-update-qqkzs" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.434281 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14899afe-5dfb-4985-bff9-f5d1611efb1d-operator-scripts\") pod \"placement-75d6-account-create-update-qqkzs\" (UID: \"14899afe-5dfb-4985-bff9-f5d1611efb1d\") " pod="openstack/placement-75d6-account-create-update-qqkzs" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.481853 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59vmt\" (UniqueName: \"kubernetes.io/projected/14899afe-5dfb-4985-bff9-f5d1611efb1d-kube-api-access-59vmt\") pod \"placement-75d6-account-create-update-qqkzs\" (UID: \"14899afe-5dfb-4985-bff9-f5d1611efb1d\") " pod="openstack/placement-75d6-account-create-update-qqkzs" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.536062 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cths8\" (UniqueName: \"kubernetes.io/projected/cd4e4bde-d294-4379-b394-44d4b43371bc-kube-api-access-cths8\") pod \"placement-db-create-fv5zp\" (UID: \"cd4e4bde-d294-4379-b394-44d4b43371bc\") " pod="openstack/placement-db-create-fv5zp" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.536357 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cd4e4bde-d294-4379-b394-44d4b43371bc-operator-scripts\") pod \"placement-db-create-fv5zp\" (UID: \"cd4e4bde-d294-4379-b394-44d4b43371bc\") " pod="openstack/placement-db-create-fv5zp" Dec 01 07:10:01 crc 
kubenswrapper[4822]: I1201 07:10:01.541291 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cd4e4bde-d294-4379-b394-44d4b43371bc-operator-scripts\") pod \"placement-db-create-fv5zp\" (UID: \"cd4e4bde-d294-4379-b394-44d4b43371bc\") " pod="openstack/placement-db-create-fv5zp" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.558879 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cths8\" (UniqueName: \"kubernetes.io/projected/cd4e4bde-d294-4379-b394-44d4b43371bc-kube-api-access-cths8\") pod \"placement-db-create-fv5zp\" (UID: \"cd4e4bde-d294-4379-b394-44d4b43371bc\") " pod="openstack/placement-db-create-fv5zp" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.612869 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-75d6-account-create-update-qqkzs" Dec 01 07:10:01 crc kubenswrapper[4822]: I1201 07:10:01.655615 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-fv5zp" Dec 01 07:10:02 crc kubenswrapper[4822]: I1201 07:10:02.252181 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0" Dec 01 07:10:02 crc kubenswrapper[4822]: E1201 07:10:02.252425 4822 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 07:10:02 crc kubenswrapper[4822]: E1201 07:10:02.252455 4822 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 07:10:02 crc kubenswrapper[4822]: E1201 07:10:02.252516 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift podName:b21643f1-db8c-4613-ac29-f1d4d0970b7b nodeName:}" failed. No retries permitted until 2025-12-01 07:10:10.252496899 +0000 UTC m=+1165.573304585 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift") pod "swift-storage-0" (UID: "b21643f1-db8c-4613-ac29-f1d4d0970b7b") : configmap "swift-ring-files" not found Dec 01 07:10:02 crc kubenswrapper[4822]: I1201 07:10:02.985498 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 01 07:10:03 crc kubenswrapper[4822]: I1201 07:10:03.678808 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mvk28" event={"ID":"a5762ef7-aac0-426f-a391-d50a06868dee","Type":"ContainerStarted","Data":"daeb2606271c3bc7ac98c2ea3f6c896b418e1dc4f9f359b298cabd839d6d1a12"} Dec 01 07:10:03 crc kubenswrapper[4822]: I1201 07:10:03.698871 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-mvk28" podStartSLOduration=1.775971408 podStartE2EDuration="5.698852175s" podCreationTimestamp="2025-12-01 07:09:58 +0000 UTC" firstStartedPulling="2025-12-01 07:09:59.41530481 +0000 UTC m=+1154.736112496" lastFinishedPulling="2025-12-01 07:10:03.338185567 +0000 UTC m=+1158.658993263" observedRunningTime="2025-12-01 07:10:03.691851488 +0000 UTC m=+1159.012659174" watchObservedRunningTime="2025-12-01 07:10:03.698852175 +0000 UTC m=+1159.019659861" Dec 01 07:10:03 crc kubenswrapper[4822]: I1201 07:10:03.743916 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 01 07:10:03 crc kubenswrapper[4822]: I1201 07:10:03.819394 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-75d6-account-create-update-qqkzs"] Dec 01 07:10:03 crc kubenswrapper[4822]: I1201 07:10:03.826689 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" Dec 01 07:10:03 crc kubenswrapper[4822]: I1201 07:10:03.892580 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bd7c66845-24c59"] Dec 01 07:10:03 crc kubenswrapper[4822]: I1201 07:10:03.892795 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bd7c66845-24c59" podUID="65412c24-2f8a-4821-b4a6-128a47ea8295" containerName="dnsmasq-dns" containerID="cri-o://7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c" gracePeriod=10 Dec 01 07:10:03 crc kubenswrapper[4822]: I1201 07:10:03.947574 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-fv5zp"] Dec 01 07:10:03 crc kubenswrapper[4822]: W1201 07:10:03.988945 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd4e4bde_d294_4379_b394_44d4b43371bc.slice/crio-e20fa544b0fb0ab547236d6de50365de7fe1c4b242235e288189027cc7a2ef90 WatchSource:0}: Error finding container e20fa544b0fb0ab547236d6de50365de7fe1c4b242235e288189027cc7a2ef90: Status 404 returned error can't find the container with id e20fa544b0fb0ab547236d6de50365de7fe1c4b242235e288189027cc7a2ef90 Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.626605 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bd7c66845-24c59" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.691129 4822 generic.go:334] "Generic (PLEG): container finished" podID="14899afe-5dfb-4985-bff9-f5d1611efb1d" containerID="1a334e476417c310fa29ea5067dd72eaf0a9ff18b956d537dea22d61fde5ec46" exitCode=0 Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.691218 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75d6-account-create-update-qqkzs" event={"ID":"14899afe-5dfb-4985-bff9-f5d1611efb1d","Type":"ContainerDied","Data":"1a334e476417c310fa29ea5067dd72eaf0a9ff18b956d537dea22d61fde5ec46"} Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.691259 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75d6-account-create-update-qqkzs" event={"ID":"14899afe-5dfb-4985-bff9-f5d1611efb1d","Type":"ContainerStarted","Data":"8f72b4f3d2d1da5a6b144416264dfadfd0ef64af2162b777eaa0b7b588298872"} Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.692639 4822 generic.go:334] "Generic (PLEG): container finished" podID="cd4e4bde-d294-4379-b394-44d4b43371bc" containerID="0f19d42ba5cdb615196de8f1acc4e7feb55cd199f580a1514463d5627d504563" exitCode=0 Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.692702 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-fv5zp" event={"ID":"cd4e4bde-d294-4379-b394-44d4b43371bc","Type":"ContainerDied","Data":"0f19d42ba5cdb615196de8f1acc4e7feb55cd199f580a1514463d5627d504563"} Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.692819 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-fv5zp" event={"ID":"cd4e4bde-d294-4379-b394-44d4b43371bc","Type":"ContainerStarted","Data":"e20fa544b0fb0ab547236d6de50365de7fe1c4b242235e288189027cc7a2ef90"} Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.695610 4822 generic.go:334] "Generic (PLEG): container finished" podID="65412c24-2f8a-4821-b4a6-128a47ea8295" containerID="7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c" exitCode=0 Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.696044 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bd7c66845-24c59" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.696591 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bd7c66845-24c59" event={"ID":"65412c24-2f8a-4821-b4a6-128a47ea8295","Type":"ContainerDied","Data":"7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c"} Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.696624 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bd7c66845-24c59" event={"ID":"65412c24-2f8a-4821-b4a6-128a47ea8295","Type":"ContainerDied","Data":"f3a4c47f90e0c305a691c0964de7fb1ac52a802a009eb734ddcd270a60efd023"} Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.696663 4822 scope.go:117] "RemoveContainer" containerID="7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.726449 4822 scope.go:117] "RemoveContainer" containerID="acaf968859322d847b4e3827e3faa6402584e2a3efd6f97b83eda7a1777aee5a" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.740498 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lr9cr\" (UniqueName: \"kubernetes.io/projected/65412c24-2f8a-4821-b4a6-128a47ea8295-kube-api-access-lr9cr\") pod \"65412c24-2f8a-4821-b4a6-128a47ea8295\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.740714 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-dns-svc\") pod \"65412c24-2f8a-4821-b4a6-128a47ea8295\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.740739 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-config\") pod \"65412c24-2f8a-4821-b4a6-128a47ea8295\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.740925 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-ovsdbserver-sb\") pod \"65412c24-2f8a-4821-b4a6-128a47ea8295\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.741024 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-ovsdbserver-nb\") pod \"65412c24-2f8a-4821-b4a6-128a47ea8295\" (UID: \"65412c24-2f8a-4821-b4a6-128a47ea8295\") " Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.746868 4822 scope.go:117] "RemoveContainer" containerID="7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c" Dec 01 07:10:04 crc kubenswrapper[4822]: E1201 07:10:04.747400 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c\": container with ID starting with 7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c not found: ID does not exist" containerID="7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.747470 4822 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c"} err="failed to get container status \"7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c\": rpc error: code = NotFound desc = could not find container \"7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c\": container with ID starting with 7ff11e8be5fbccadb62f5affb44f6b81c2df0ac9e360251d3e18b63f9143e53c not found: ID does not exist" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.747504 4822 scope.go:117] "RemoveContainer" containerID="acaf968859322d847b4e3827e3faa6402584e2a3efd6f97b83eda7a1777aee5a" Dec 01 07:10:04 crc kubenswrapper[4822]: E1201 07:10:04.747812 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acaf968859322d847b4e3827e3faa6402584e2a3efd6f97b83eda7a1777aee5a\": container with ID starting with acaf968859322d847b4e3827e3faa6402584e2a3efd6f97b83eda7a1777aee5a not found: ID does not exist" containerID="acaf968859322d847b4e3827e3faa6402584e2a3efd6f97b83eda7a1777aee5a" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.747836 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acaf968859322d847b4e3827e3faa6402584e2a3efd6f97b83eda7a1777aee5a"} err="failed to get container status \"acaf968859322d847b4e3827e3faa6402584e2a3efd6f97b83eda7a1777aee5a\": rpc error: code = NotFound desc = could not find container \"acaf968859322d847b4e3827e3faa6402584e2a3efd6f97b83eda7a1777aee5a\": container with ID starting with acaf968859322d847b4e3827e3faa6402584e2a3efd6f97b83eda7a1777aee5a not found: ID does not exist" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.752951 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65412c24-2f8a-4821-b4a6-128a47ea8295-kube-api-access-lr9cr" (OuterVolumeSpecName: "kube-api-access-lr9cr") pod "65412c24-2f8a-4821-b4a6-128a47ea8295" (UID: "65412c24-2f8a-4821-b4a6-128a47ea8295"). InnerVolumeSpecName "kube-api-access-lr9cr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.800841 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-config" (OuterVolumeSpecName: "config") pod "65412c24-2f8a-4821-b4a6-128a47ea8295" (UID: "65412c24-2f8a-4821-b4a6-128a47ea8295"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.801408 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "65412c24-2f8a-4821-b4a6-128a47ea8295" (UID: "65412c24-2f8a-4821-b4a6-128a47ea8295"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.805270 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "65412c24-2f8a-4821-b4a6-128a47ea8295" (UID: "65412c24-2f8a-4821-b4a6-128a47ea8295"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.833142 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "65412c24-2f8a-4821-b4a6-128a47ea8295" (UID: "65412c24-2f8a-4821-b4a6-128a47ea8295"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.843673 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.843706 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.843716 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lr9cr\" (UniqueName: \"kubernetes.io/projected/65412c24-2f8a-4821-b4a6-128a47ea8295-kube-api-access-lr9cr\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.843726 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:04 crc kubenswrapper[4822]: I1201 07:10:04.843735 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65412c24-2f8a-4821-b4a6-128a47ea8295-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:05 crc kubenswrapper[4822]: I1201 07:10:05.019998 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bd7c66845-24c59"] Dec 01 07:10:05 crc kubenswrapper[4822]: I1201 07:10:05.028816 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bd7c66845-24c59"] Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.258685 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-fv5zp" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.266949 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-75d6-account-create-update-qqkzs" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.378670 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cths8\" (UniqueName: \"kubernetes.io/projected/cd4e4bde-d294-4379-b394-44d4b43371bc-kube-api-access-cths8\") pod \"cd4e4bde-d294-4379-b394-44d4b43371bc\" (UID: \"cd4e4bde-d294-4379-b394-44d4b43371bc\") " Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.378737 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59vmt\" (UniqueName: \"kubernetes.io/projected/14899afe-5dfb-4985-bff9-f5d1611efb1d-kube-api-access-59vmt\") pod \"14899afe-5dfb-4985-bff9-f5d1611efb1d\" (UID: \"14899afe-5dfb-4985-bff9-f5d1611efb1d\") " Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.378835 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cd4e4bde-d294-4379-b394-44d4b43371bc-operator-scripts\") pod \"cd4e4bde-d294-4379-b394-44d4b43371bc\" (UID: \"cd4e4bde-d294-4379-b394-44d4b43371bc\") " Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.378885 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14899afe-5dfb-4985-bff9-f5d1611efb1d-operator-scripts\") pod \"14899afe-5dfb-4985-bff9-f5d1611efb1d\" (UID: \"14899afe-5dfb-4985-bff9-f5d1611efb1d\") " Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.379606 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd4e4bde-d294-4379-b394-44d4b43371bc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cd4e4bde-d294-4379-b394-44d4b43371bc" (UID: "cd4e4bde-d294-4379-b394-44d4b43371bc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.380276 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14899afe-5dfb-4985-bff9-f5d1611efb1d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "14899afe-5dfb-4985-bff9-f5d1611efb1d" (UID: "14899afe-5dfb-4985-bff9-f5d1611efb1d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.382495 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14899afe-5dfb-4985-bff9-f5d1611efb1d-kube-api-access-59vmt" (OuterVolumeSpecName: "kube-api-access-59vmt") pod "14899afe-5dfb-4985-bff9-f5d1611efb1d" (UID: "14899afe-5dfb-4985-bff9-f5d1611efb1d"). InnerVolumeSpecName "kube-api-access-59vmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.386011 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd4e4bde-d294-4379-b394-44d4b43371bc-kube-api-access-cths8" (OuterVolumeSpecName: "kube-api-access-cths8") pod "cd4e4bde-d294-4379-b394-44d4b43371bc" (UID: "cd4e4bde-d294-4379-b394-44d4b43371bc"). InnerVolumeSpecName "kube-api-access-cths8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.481609 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cths8\" (UniqueName: \"kubernetes.io/projected/cd4e4bde-d294-4379-b394-44d4b43371bc-kube-api-access-cths8\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.481700 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59vmt\" (UniqueName: \"kubernetes.io/projected/14899afe-5dfb-4985-bff9-f5d1611efb1d-kube-api-access-59vmt\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.481725 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cd4e4bde-d294-4379-b394-44d4b43371bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.481777 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14899afe-5dfb-4985-bff9-f5d1611efb1d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.529937 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-hbjw6"] Dec 01 07:10:06 crc kubenswrapper[4822]: E1201 07:10:06.530272 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14899afe-5dfb-4985-bff9-f5d1611efb1d" containerName="mariadb-account-create-update" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.530293 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="14899afe-5dfb-4985-bff9-f5d1611efb1d" containerName="mariadb-account-create-update" Dec 01 07:10:06 crc kubenswrapper[4822]: E1201 07:10:06.530310 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65412c24-2f8a-4821-b4a6-128a47ea8295" containerName="init" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.530321 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="65412c24-2f8a-4821-b4a6-128a47ea8295" containerName="init" Dec 01 07:10:06 crc kubenswrapper[4822]: E1201 07:10:06.530330 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65412c24-2f8a-4821-b4a6-128a47ea8295" containerName="dnsmasq-dns" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.530338 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="65412c24-2f8a-4821-b4a6-128a47ea8295" containerName="dnsmasq-dns" Dec 01 07:10:06 crc kubenswrapper[4822]: E1201 07:10:06.530348 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd4e4bde-d294-4379-b394-44d4b43371bc" containerName="mariadb-database-create" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.530355 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd4e4bde-d294-4379-b394-44d4b43371bc" containerName="mariadb-database-create" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.530528 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd4e4bde-d294-4379-b394-44d4b43371bc" containerName="mariadb-database-create" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.530540 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="65412c24-2f8a-4821-b4a6-128a47ea8295" containerName="dnsmasq-dns" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.530567 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="14899afe-5dfb-4985-bff9-f5d1611efb1d" containerName="mariadb-account-create-update" Dec 01 
07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.531082 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-hbjw6" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.550086 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-hbjw6"] Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.614695 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-8845-account-create-update-rjq78"] Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.616257 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8845-account-create-update-rjq78" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.618938 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.622241 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-8845-account-create-update-rjq78"] Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.685340 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz8jp\" (UniqueName: \"kubernetes.io/projected/f369e477-2b99-4f5b-abb8-eb788818325f-kube-api-access-tz8jp\") pod \"glance-db-create-hbjw6\" (UID: \"f369e477-2b99-4f5b-abb8-eb788818325f\") " pod="openstack/glance-db-create-hbjw6" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.685430 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f369e477-2b99-4f5b-abb8-eb788818325f-operator-scripts\") pod \"glance-db-create-hbjw6\" (UID: \"f369e477-2b99-4f5b-abb8-eb788818325f\") " pod="openstack/glance-db-create-hbjw6" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.719174 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-fv5zp" event={"ID":"cd4e4bde-d294-4379-b394-44d4b43371bc","Type":"ContainerDied","Data":"e20fa544b0fb0ab547236d6de50365de7fe1c4b242235e288189027cc7a2ef90"} Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.719222 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e20fa544b0fb0ab547236d6de50365de7fe1c4b242235e288189027cc7a2ef90" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.719285 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-fv5zp" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.725923 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75d6-account-create-update-qqkzs" event={"ID":"14899afe-5dfb-4985-bff9-f5d1611efb1d","Type":"ContainerDied","Data":"8f72b4f3d2d1da5a6b144416264dfadfd0ef64af2162b777eaa0b7b588298872"} Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.725973 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f72b4f3d2d1da5a6b144416264dfadfd0ef64af2162b777eaa0b7b588298872" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.726035 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-75d6-account-create-update-qqkzs" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.786848 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz8jp\" (UniqueName: \"kubernetes.io/projected/f369e477-2b99-4f5b-abb8-eb788818325f-kube-api-access-tz8jp\") pod \"glance-db-create-hbjw6\" (UID: \"f369e477-2b99-4f5b-abb8-eb788818325f\") " pod="openstack/glance-db-create-hbjw6" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.787315 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsrxg\" (UniqueName: \"kubernetes.io/projected/d6636d12-3187-445d-afaf-2218dd71d932-kube-api-access-fsrxg\") pod \"glance-8845-account-create-update-rjq78\" (UID: \"d6636d12-3187-445d-afaf-2218dd71d932\") " pod="openstack/glance-8845-account-create-update-rjq78" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.787415 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f369e477-2b99-4f5b-abb8-eb788818325f-operator-scripts\") pod \"glance-db-create-hbjw6\" (UID: \"f369e477-2b99-4f5b-abb8-eb788818325f\") " pod="openstack/glance-db-create-hbjw6" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.787452 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d6636d12-3187-445d-afaf-2218dd71d932-operator-scripts\") pod \"glance-8845-account-create-update-rjq78\" (UID: \"d6636d12-3187-445d-afaf-2218dd71d932\") " pod="openstack/glance-8845-account-create-update-rjq78" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.788277 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f369e477-2b99-4f5b-abb8-eb788818325f-operator-scripts\") pod \"glance-db-create-hbjw6\" (UID: \"f369e477-2b99-4f5b-abb8-eb788818325f\") " pod="openstack/glance-db-create-hbjw6" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.818320 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz8jp\" (UniqueName: \"kubernetes.io/projected/f369e477-2b99-4f5b-abb8-eb788818325f-kube-api-access-tz8jp\") pod \"glance-db-create-hbjw6\" (UID: \"f369e477-2b99-4f5b-abb8-eb788818325f\") " pod="openstack/glance-db-create-hbjw6" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.850472 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-hbjw6" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.890818 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d6636d12-3187-445d-afaf-2218dd71d932-operator-scripts\") pod \"glance-8845-account-create-update-rjq78\" (UID: \"d6636d12-3187-445d-afaf-2218dd71d932\") " pod="openstack/glance-8845-account-create-update-rjq78" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.890901 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d6636d12-3187-445d-afaf-2218dd71d932-operator-scripts\") pod \"glance-8845-account-create-update-rjq78\" (UID: \"d6636d12-3187-445d-afaf-2218dd71d932\") " pod="openstack/glance-8845-account-create-update-rjq78" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.891144 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsrxg\" (UniqueName: \"kubernetes.io/projected/d6636d12-3187-445d-afaf-2218dd71d932-kube-api-access-fsrxg\") pod \"glance-8845-account-create-update-rjq78\" (UID: \"d6636d12-3187-445d-afaf-2218dd71d932\") " pod="openstack/glance-8845-account-create-update-rjq78" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.914749 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsrxg\" (UniqueName: \"kubernetes.io/projected/d6636d12-3187-445d-afaf-2218dd71d932-kube-api-access-fsrxg\") pod \"glance-8845-account-create-update-rjq78\" (UID: \"d6636d12-3187-445d-afaf-2218dd71d932\") " pod="openstack/glance-8845-account-create-update-rjq78" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.950876 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-8845-account-create-update-rjq78" Dec 01 07:10:06 crc kubenswrapper[4822]: I1201 07:10:06.963863 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65412c24-2f8a-4821-b4a6-128a47ea8295" path="/var/lib/kubelet/pods/65412c24-2f8a-4821-b4a6-128a47ea8295/volumes" Dec 01 07:10:07 crc kubenswrapper[4822]: I1201 07:10:07.412315 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-hbjw6"] Dec 01 07:10:07 crc kubenswrapper[4822]: W1201 07:10:07.415855 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf369e477_2b99_4f5b_abb8_eb788818325f.slice/crio-3a5c1f341f527e7d49232ac348de4796c93e98b0e4c7f1b323a59d35d3cddce9 WatchSource:0}: Error finding container 3a5c1f341f527e7d49232ac348de4796c93e98b0e4c7f1b323a59d35d3cddce9: Status 404 returned error can't find the container with id 3a5c1f341f527e7d49232ac348de4796c93e98b0e4c7f1b323a59d35d3cddce9 Dec 01 07:10:07 crc kubenswrapper[4822]: I1201 07:10:07.492281 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-8845-account-create-update-rjq78"] Dec 01 07:10:07 crc kubenswrapper[4822]: I1201 07:10:07.738977 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-hbjw6" event={"ID":"f369e477-2b99-4f5b-abb8-eb788818325f","Type":"ContainerStarted","Data":"0fb01169b6f760501811400952165121826877ab0f41bcfb6de0a4bad911be52"} Dec 01 07:10:07 crc kubenswrapper[4822]: I1201 07:10:07.739046 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-hbjw6" event={"ID":"f369e477-2b99-4f5b-abb8-eb788818325f","Type":"ContainerStarted","Data":"3a5c1f341f527e7d49232ac348de4796c93e98b0e4c7f1b323a59d35d3cddce9"} Dec 01 07:10:07 crc kubenswrapper[4822]: I1201 07:10:07.743980 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8845-account-create-update-rjq78" event={"ID":"d6636d12-3187-445d-afaf-2218dd71d932","Type":"ContainerStarted","Data":"449bd9a27b6680631de93239f213a795f8827ad6d0b52167e266af16c633201e"} Dec 01 07:10:07 crc kubenswrapper[4822]: I1201 07:10:07.744035 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8845-account-create-update-rjq78" event={"ID":"d6636d12-3187-445d-afaf-2218dd71d932","Type":"ContainerStarted","Data":"0efa3fcbb3d499d3747bba77f1ffe4a4a142d4081d64f002d385ba536e756364"} Dec 01 07:10:07 crc kubenswrapper[4822]: I1201 07:10:07.760940 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-hbjw6" podStartSLOduration=1.760921204 podStartE2EDuration="1.760921204s" podCreationTimestamp="2025-12-01 07:10:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:10:07.753719591 +0000 UTC m=+1163.074527287" watchObservedRunningTime="2025-12-01 07:10:07.760921204 +0000 UTC m=+1163.081728900" Dec 01 07:10:07 crc kubenswrapper[4822]: I1201 07:10:07.774596 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-8845-account-create-update-rjq78" podStartSLOduration=1.774580588 podStartE2EDuration="1.774580588s" podCreationTimestamp="2025-12-01 07:10:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:10:07.768232079 +0000 UTC m=+1163.089039765" 
watchObservedRunningTime="2025-12-01 07:10:07.774580588 +0000 UTC m=+1163.095388264" Dec 01 07:10:08 crc kubenswrapper[4822]: I1201 07:10:08.814084 4822 generic.go:334] "Generic (PLEG): container finished" podID="d6636d12-3187-445d-afaf-2218dd71d932" containerID="449bd9a27b6680631de93239f213a795f8827ad6d0b52167e266af16c633201e" exitCode=0 Dec 01 07:10:08 crc kubenswrapper[4822]: I1201 07:10:08.814378 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8845-account-create-update-rjq78" event={"ID":"d6636d12-3187-445d-afaf-2218dd71d932","Type":"ContainerDied","Data":"449bd9a27b6680631de93239f213a795f8827ad6d0b52167e266af16c633201e"} Dec 01 07:10:08 crc kubenswrapper[4822]: I1201 07:10:08.847682 4822 generic.go:334] "Generic (PLEG): container finished" podID="f369e477-2b99-4f5b-abb8-eb788818325f" containerID="0fb01169b6f760501811400952165121826877ab0f41bcfb6de0a4bad911be52" exitCode=0 Dec 01 07:10:08 crc kubenswrapper[4822]: I1201 07:10:08.847729 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-hbjw6" event={"ID":"f369e477-2b99-4f5b-abb8-eb788818325f","Type":"ContainerDied","Data":"0fb01169b6f760501811400952165121826877ab0f41bcfb6de0a4bad911be52"} Dec 01 07:10:09 crc kubenswrapper[4822]: I1201 07:10:09.858856 4822 generic.go:334] "Generic (PLEG): container finished" podID="a1229c08-35a5-4f16-8334-f32bb9b852b6" containerID="3a3f116481c73dfe5a9a8a3545e38ee6bdbcf5cf5bee9252fffc9f947030fbe5" exitCode=0 Dec 01 07:10:09 crc kubenswrapper[4822]: I1201 07:10:09.859367 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a1229c08-35a5-4f16-8334-f32bb9b852b6","Type":"ContainerDied","Data":"3a3f116481c73dfe5a9a8a3545e38ee6bdbcf5cf5bee9252fffc9f947030fbe5"} Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.276870 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0" Dec 01 07:10:10 crc kubenswrapper[4822]: E1201 07:10:10.277384 4822 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 07:10:10 crc kubenswrapper[4822]: E1201 07:10:10.277401 4822 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 07:10:10 crc kubenswrapper[4822]: E1201 07:10:10.277448 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift podName:b21643f1-db8c-4613-ac29-f1d4d0970b7b nodeName:}" failed. No retries permitted until 2025-12-01 07:10:26.277432451 +0000 UTC m=+1181.598240137 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift") pod "swift-storage-0" (UID: "b21643f1-db8c-4613-ac29-f1d4d0970b7b") : configmap "swift-ring-files" not found Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.458506 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8845-account-create-update-rjq78" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.461008 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-hbjw6" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.505353 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.507819 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.507893 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-q78vl" podUID="add830fb-5a2f-4cc2-8998-32ca893263db" containerName="ovn-controller" probeResult="failure" output=< Dec 01 07:10:10 crc kubenswrapper[4822]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 01 07:10:10 crc kubenswrapper[4822]: > Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.586747 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsrxg\" (UniqueName: \"kubernetes.io/projected/d6636d12-3187-445d-afaf-2218dd71d932-kube-api-access-fsrxg\") pod \"d6636d12-3187-445d-afaf-2218dd71d932\" (UID: \"d6636d12-3187-445d-afaf-2218dd71d932\") " Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.586828 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d6636d12-3187-445d-afaf-2218dd71d932-operator-scripts\") pod \"d6636d12-3187-445d-afaf-2218dd71d932\" (UID: \"d6636d12-3187-445d-afaf-2218dd71d932\") " Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.586961 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tz8jp\" (UniqueName: \"kubernetes.io/projected/f369e477-2b99-4f5b-abb8-eb788818325f-kube-api-access-tz8jp\") pod \"f369e477-2b99-4f5b-abb8-eb788818325f\" (UID: \"f369e477-2b99-4f5b-abb8-eb788818325f\") " Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.587038 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f369e477-2b99-4f5b-abb8-eb788818325f-operator-scripts\") pod \"f369e477-2b99-4f5b-abb8-eb788818325f\" (UID: \"f369e477-2b99-4f5b-abb8-eb788818325f\") " Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.587605 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6636d12-3187-445d-afaf-2218dd71d932-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d6636d12-3187-445d-afaf-2218dd71d932" (UID: "d6636d12-3187-445d-afaf-2218dd71d932"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.587715 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f369e477-2b99-4f5b-abb8-eb788818325f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f369e477-2b99-4f5b-abb8-eb788818325f" (UID: "f369e477-2b99-4f5b-abb8-eb788818325f"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.592961 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6636d12-3187-445d-afaf-2218dd71d932-kube-api-access-fsrxg" (OuterVolumeSpecName: "kube-api-access-fsrxg") pod "d6636d12-3187-445d-afaf-2218dd71d932" (UID: "d6636d12-3187-445d-afaf-2218dd71d932"). InnerVolumeSpecName "kube-api-access-fsrxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.595163 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f369e477-2b99-4f5b-abb8-eb788818325f-kube-api-access-tz8jp" (OuterVolumeSpecName: "kube-api-access-tz8jp") pod "f369e477-2b99-4f5b-abb8-eb788818325f" (UID: "f369e477-2b99-4f5b-abb8-eb788818325f"). InnerVolumeSpecName "kube-api-access-tz8jp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.688954 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsrxg\" (UniqueName: \"kubernetes.io/projected/d6636d12-3187-445d-afaf-2218dd71d932-kube-api-access-fsrxg\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.689357 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d6636d12-3187-445d-afaf-2218dd71d932-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.689373 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tz8jp\" (UniqueName: \"kubernetes.io/projected/f369e477-2b99-4f5b-abb8-eb788818325f-kube-api-access-tz8jp\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.689384 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f369e477-2b99-4f5b-abb8-eb788818325f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.799007 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-q78vl-config-4qr5t"] Dec 01 07:10:10 crc kubenswrapper[4822]: E1201 07:10:10.799439 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6636d12-3187-445d-afaf-2218dd71d932" containerName="mariadb-account-create-update" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.799462 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6636d12-3187-445d-afaf-2218dd71d932" containerName="mariadb-account-create-update" Dec 01 07:10:10 crc kubenswrapper[4822]: E1201 07:10:10.799496 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f369e477-2b99-4f5b-abb8-eb788818325f" containerName="mariadb-database-create" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.799507 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f369e477-2b99-4f5b-abb8-eb788818325f" containerName="mariadb-database-create" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.799746 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f369e477-2b99-4f5b-abb8-eb788818325f" containerName="mariadb-database-create" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.799770 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6636d12-3187-445d-afaf-2218dd71d932" containerName="mariadb-account-create-update" Dec 01 07:10:10 crc 
kubenswrapper[4822]: I1201 07:10:10.800461 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.829341 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.845475 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-2t96f"] Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.847557 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2t96f" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.864355 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-ef00-account-create-update-h4cql"] Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.866462 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-ef00-account-create-update-h4cql" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.885475 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-q78vl-config-4qr5t"] Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.889467 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.894726 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-2t96f"] Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.911786 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-hbjw6" event={"ID":"f369e477-2b99-4f5b-abb8-eb788818325f","Type":"ContainerDied","Data":"3a5c1f341f527e7d49232ac348de4796c93e98b0e4c7f1b323a59d35d3cddce9"} Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.912016 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a5c1f341f527e7d49232ac348de4796c93e98b0e4c7f1b323a59d35d3cddce9" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.912137 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-hbjw6" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.918531 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-ef00-account-create-update-h4cql"] Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.923313 4822 generic.go:334] "Generic (PLEG): container finished" podID="da2985c5-716e-43ad-b892-ea29d88fa639" containerID="d49bbf181bc9328d72a73c564789a29b58507dddaa3e5e79ce55c2b497a8f7ef" exitCode=0 Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.923454 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"da2985c5-716e-43ad-b892-ea29d88fa639","Type":"ContainerDied","Data":"d49bbf181bc9328d72a73c564789a29b58507dddaa3e5e79ce55c2b497a8f7ef"} Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.936922 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-8845-account-create-update-rjq78" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.936937 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8845-account-create-update-rjq78" event={"ID":"d6636d12-3187-445d-afaf-2218dd71d932","Type":"ContainerDied","Data":"0efa3fcbb3d499d3747bba77f1ffe4a4a142d4081d64f002d385ba536e756364"} Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.937002 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0efa3fcbb3d499d3747bba77f1ffe4a4a142d4081d64f002d385ba536e756364" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.940766 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a1229c08-35a5-4f16-8334-f32bb9b852b6","Type":"ContainerStarted","Data":"eb9c3b14412dab696cda88040a38b9cdac23b8d8872cc1f5f086a31ccba67ffc"} Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.941054 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.996529 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a630d16e-8baf-4fbd-851d-ae5c49baf062-operator-scripts\") pod \"keystone-ef00-account-create-update-h4cql\" (UID: \"a630d16e-8baf-4fbd-851d-ae5c49baf062\") " pod="openstack/keystone-ef00-account-create-update-h4cql" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.996665 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/30a3a972-051f-455e-b6a0-892d0b2975ef-additional-scripts\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.996704 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzftd\" (UniqueName: \"kubernetes.io/projected/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a-kube-api-access-kzftd\") pod \"keystone-db-create-2t96f\" (UID: \"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a\") " pod="openstack/keystone-db-create-2t96f" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.996741 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmbvv\" (UniqueName: \"kubernetes.io/projected/30a3a972-051f-455e-b6a0-892d0b2975ef-kube-api-access-fmbvv\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.996801 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brqtl\" (UniqueName: \"kubernetes.io/projected/a630d16e-8baf-4fbd-851d-ae5c49baf062-kube-api-access-brqtl\") pod \"keystone-ef00-account-create-update-h4cql\" (UID: \"a630d16e-8baf-4fbd-851d-ae5c49baf062\") " pod="openstack/keystone-ef00-account-create-update-h4cql" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.996823 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-log-ovn\") pod 
\"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.996864 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-run-ovn\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.996892 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a-operator-scripts\") pod \"keystone-db-create-2t96f\" (UID: \"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a\") " pod="openstack/keystone-db-create-2t96f" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.996925 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-run\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:10 crc kubenswrapper[4822]: I1201 07:10:10.996944 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30a3a972-051f-455e-b6a0-892d0b2975ef-scripts\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.011650 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.864209068 podStartE2EDuration="1m15.011629098s" podCreationTimestamp="2025-12-01 07:08:56 +0000 UTC" firstStartedPulling="2025-12-01 07:08:59.514495751 +0000 UTC m=+1094.835303437" lastFinishedPulling="2025-12-01 07:09:35.661915781 +0000 UTC m=+1130.982723467" observedRunningTime="2025-12-01 07:10:11.004082566 +0000 UTC m=+1166.324890252" watchObservedRunningTime="2025-12-01 07:10:11.011629098 +0000 UTC m=+1166.332436784" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.100264 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a630d16e-8baf-4fbd-851d-ae5c49baf062-operator-scripts\") pod \"keystone-ef00-account-create-update-h4cql\" (UID: \"a630d16e-8baf-4fbd-851d-ae5c49baf062\") " pod="openstack/keystone-ef00-account-create-update-h4cql" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.100345 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/30a3a972-051f-455e-b6a0-892d0b2975ef-additional-scripts\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.100397 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzftd\" (UniqueName: \"kubernetes.io/projected/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a-kube-api-access-kzftd\") pod \"keystone-db-create-2t96f\" (UID: 
\"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a\") " pod="openstack/keystone-db-create-2t96f" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.100474 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmbvv\" (UniqueName: \"kubernetes.io/projected/30a3a972-051f-455e-b6a0-892d0b2975ef-kube-api-access-fmbvv\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.100479 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a630d16e-8baf-4fbd-851d-ae5c49baf062-operator-scripts\") pod \"keystone-ef00-account-create-update-h4cql\" (UID: \"a630d16e-8baf-4fbd-851d-ae5c49baf062\") " pod="openstack/keystone-ef00-account-create-update-h4cql" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.100620 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brqtl\" (UniqueName: \"kubernetes.io/projected/a630d16e-8baf-4fbd-851d-ae5c49baf062-kube-api-access-brqtl\") pod \"keystone-ef00-account-create-update-h4cql\" (UID: \"a630d16e-8baf-4fbd-851d-ae5c49baf062\") " pod="openstack/keystone-ef00-account-create-update-h4cql" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.100670 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-log-ovn\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.100776 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-run-ovn\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.100843 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a-operator-scripts\") pod \"keystone-db-create-2t96f\" (UID: \"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a\") " pod="openstack/keystone-db-create-2t96f" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.100972 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-run\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.101005 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30a3a972-051f-455e-b6a0-892d0b2975ef-scripts\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.101462 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-run-ovn\") pod 
\"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.102344 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-log-ovn\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.102655 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/30a3a972-051f-455e-b6a0-892d0b2975ef-additional-scripts\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.103025 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-run\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.103915 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a-operator-scripts\") pod \"keystone-db-create-2t96f\" (UID: \"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a\") " pod="openstack/keystone-db-create-2t96f" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.104800 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30a3a972-051f-455e-b6a0-892d0b2975ef-scripts\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.122208 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmbvv\" (UniqueName: \"kubernetes.io/projected/30a3a972-051f-455e-b6a0-892d0b2975ef-kube-api-access-fmbvv\") pod \"ovn-controller-q78vl-config-4qr5t\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.123843 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzftd\" (UniqueName: \"kubernetes.io/projected/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a-kube-api-access-kzftd\") pod \"keystone-db-create-2t96f\" (UID: \"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a\") " pod="openstack/keystone-db-create-2t96f" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.124970 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brqtl\" (UniqueName: \"kubernetes.io/projected/a630d16e-8baf-4fbd-851d-ae5c49baf062-kube-api-access-brqtl\") pod \"keystone-ef00-account-create-update-h4cql\" (UID: \"a630d16e-8baf-4fbd-851d-ae5c49baf062\") " pod="openstack/keystone-ef00-account-create-update-h4cql" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.143471 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.212602 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2t96f" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.234741 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-ef00-account-create-update-h4cql" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.577483 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-q78vl-config-4qr5t"] Dec 01 07:10:11 crc kubenswrapper[4822]: W1201 07:10:11.606333 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30a3a972_051f_455e_b6a0_892d0b2975ef.slice/crio-ec022f12f1f67e088d61c052773f500f7aa31eb20a9f01a54cda64df02ae1eed WatchSource:0}: Error finding container ec022f12f1f67e088d61c052773f500f7aa31eb20a9f01a54cda64df02ae1eed: Status 404 returned error can't find the container with id ec022f12f1f67e088d61c052773f500f7aa31eb20a9f01a54cda64df02ae1eed Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.903879 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-h9ttd"] Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.904816 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.907513 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.907642 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-zzv5d" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.920655 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-h9ttd"] Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.947141 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-q78vl-config-4qr5t" event={"ID":"30a3a972-051f-455e-b6a0-892d0b2975ef","Type":"ContainerStarted","Data":"ec022f12f1f67e088d61c052773f500f7aa31eb20a9f01a54cda64df02ae1eed"} Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.949322 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"da2985c5-716e-43ad-b892-ea29d88fa639","Type":"ContainerStarted","Data":"67b4ee72481fb3afcfc3392e80b6461b38dd56f3a4807eaae7eb4e9cb55e7a0c"} Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.950771 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.956953 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-2t96f"] Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.966412 4822 generic.go:334] "Generic (PLEG): container finished" podID="a5762ef7-aac0-426f-a391-d50a06868dee" containerID="daeb2606271c3bc7ac98c2ea3f6c896b418e1dc4f9f359b298cabd839d6d1a12" exitCode=0 Dec 01 07:10:11 crc kubenswrapper[4822]: I1201 07:10:11.966497 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mvk28" event={"ID":"a5762ef7-aac0-426f-a391-d50a06868dee","Type":"ContainerDied","Data":"daeb2606271c3bc7ac98c2ea3f6c896b418e1dc4f9f359b298cabd839d6d1a12"} Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 
07:10:12.005855 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371960.848942 podStartE2EDuration="1m16.005833843s" podCreationTimestamp="2025-12-01 07:08:56 +0000 UTC" firstStartedPulling="2025-12-01 07:08:59.486994799 +0000 UTC m=+1094.807802485" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:10:11.997364255 +0000 UTC m=+1167.318171951" watchObservedRunningTime="2025-12-01 07:10:12.005833843 +0000 UTC m=+1167.326641529" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.025407 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-ef00-account-create-update-h4cql"] Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.036369 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-config-data\") pod \"glance-db-sync-h9ttd\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") " pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.036451 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-combined-ca-bundle\") pod \"glance-db-sync-h9ttd\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") " pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.036496 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-db-sync-config-data\") pod \"glance-db-sync-h9ttd\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") " pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.036570 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-547xt\" (UniqueName: \"kubernetes.io/projected/3f992a05-1279-4c84-b09b-977a6b1e4ea8-kube-api-access-547xt\") pod \"glance-db-sync-h9ttd\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") " pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.137869 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-config-data\") pod \"glance-db-sync-h9ttd\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") " pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.138055 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-combined-ca-bundle\") pod \"glance-db-sync-h9ttd\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") " pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.138080 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-db-sync-config-data\") pod \"glance-db-sync-h9ttd\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") " pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.138132 4822 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-547xt\" (UniqueName: \"kubernetes.io/projected/3f992a05-1279-4c84-b09b-977a6b1e4ea8-kube-api-access-547xt\") pod \"glance-db-sync-h9ttd\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") " pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.150150 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-db-sync-config-data\") pod \"glance-db-sync-h9ttd\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") " pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.150186 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-config-data\") pod \"glance-db-sync-h9ttd\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") " pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.156158 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-combined-ca-bundle\") pod \"glance-db-sync-h9ttd\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") " pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.166145 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-547xt\" (UniqueName: \"kubernetes.io/projected/3f992a05-1279-4c84-b09b-977a6b1e4ea8-kube-api-access-547xt\") pod \"glance-db-sync-h9ttd\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") " pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.232748 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-h9ttd" Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.850583 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-h9ttd"] Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.975687 4822 generic.go:334] "Generic (PLEG): container finished" podID="e7060e1b-a0a1-4403-a0f3-6fb1ffff308a" containerID="f5ca2f96ae7a12beec4679c70a0fc08d1c3054a30bfc4e4071fda687d5f7b971" exitCode=0 Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.975743 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2t96f" event={"ID":"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a","Type":"ContainerDied","Data":"f5ca2f96ae7a12beec4679c70a0fc08d1c3054a30bfc4e4071fda687d5f7b971"} Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.975791 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2t96f" event={"ID":"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a","Type":"ContainerStarted","Data":"d64dd407d12facd7d23438d6840579e666fced5fb43ec6e594e54553db98b660"} Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.977042 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-h9ttd" event={"ID":"3f992a05-1279-4c84-b09b-977a6b1e4ea8","Type":"ContainerStarted","Data":"583076057f35817395ebb630bdebd796135f65cba6e82ca4b88083d401417932"} Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.978403 4822 generic.go:334] "Generic (PLEG): container finished" podID="a630d16e-8baf-4fbd-851d-ae5c49baf062" containerID="deae1406f33a30faeb722a6c9ba9c11668ee2406828f189e39fe3ff530735d90" exitCode=0 Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.978539 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-ef00-account-create-update-h4cql" event={"ID":"a630d16e-8baf-4fbd-851d-ae5c49baf062","Type":"ContainerDied","Data":"deae1406f33a30faeb722a6c9ba9c11668ee2406828f189e39fe3ff530735d90"} Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.978614 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-ef00-account-create-update-h4cql" event={"ID":"a630d16e-8baf-4fbd-851d-ae5c49baf062","Type":"ContainerStarted","Data":"7fa68a27e7ab05e6368fcb7af3655150964b3db9d5839de2c24c6cd24c20145e"} Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.980346 4822 generic.go:334] "Generic (PLEG): container finished" podID="30a3a972-051f-455e-b6a0-892d0b2975ef" containerID="db32a07804c815b922bb3650cdaa9cf76de9ef8ee30f368859af9cc6273db1f1" exitCode=0 Dec 01 07:10:12 crc kubenswrapper[4822]: I1201 07:10:12.980448 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-q78vl-config-4qr5t" event={"ID":"30a3a972-051f-455e-b6a0-892d0b2975ef","Type":"ContainerDied","Data":"db32a07804c815b922bb3650cdaa9cf76de9ef8ee30f368859af9cc6273db1f1"} Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.453679 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.577778 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdhks\" (UniqueName: \"kubernetes.io/projected/a5762ef7-aac0-426f-a391-d50a06868dee-kube-api-access-rdhks\") pod \"a5762ef7-aac0-426f-a391-d50a06868dee\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.577843 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a5762ef7-aac0-426f-a391-d50a06868dee-ring-data-devices\") pod \"a5762ef7-aac0-426f-a391-d50a06868dee\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.577908 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a5762ef7-aac0-426f-a391-d50a06868dee-etc-swift\") pod \"a5762ef7-aac0-426f-a391-d50a06868dee\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.577987 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-swiftconf\") pod \"a5762ef7-aac0-426f-a391-d50a06868dee\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.578053 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-dispersionconf\") pod \"a5762ef7-aac0-426f-a391-d50a06868dee\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.578087 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-combined-ca-bundle\") pod \"a5762ef7-aac0-426f-a391-d50a06868dee\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.578122 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5762ef7-aac0-426f-a391-d50a06868dee-scripts\") pod \"a5762ef7-aac0-426f-a391-d50a06868dee\" (UID: \"a5762ef7-aac0-426f-a391-d50a06868dee\") " Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.578921 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5762ef7-aac0-426f-a391-d50a06868dee-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "a5762ef7-aac0-426f-a391-d50a06868dee" (UID: "a5762ef7-aac0-426f-a391-d50a06868dee"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.579089 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5762ef7-aac0-426f-a391-d50a06868dee-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "a5762ef7-aac0-426f-a391-d50a06868dee" (UID: "a5762ef7-aac0-426f-a391-d50a06868dee"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.586847 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5762ef7-aac0-426f-a391-d50a06868dee-kube-api-access-rdhks" (OuterVolumeSpecName: "kube-api-access-rdhks") pod "a5762ef7-aac0-426f-a391-d50a06868dee" (UID: "a5762ef7-aac0-426f-a391-d50a06868dee"). InnerVolumeSpecName "kube-api-access-rdhks". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.589745 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "a5762ef7-aac0-426f-a391-d50a06868dee" (UID: "a5762ef7-aac0-426f-a391-d50a06868dee"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.603794 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "a5762ef7-aac0-426f-a391-d50a06868dee" (UID: "a5762ef7-aac0-426f-a391-d50a06868dee"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.608218 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5762ef7-aac0-426f-a391-d50a06868dee" (UID: "a5762ef7-aac0-426f-a391-d50a06868dee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.622464 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5762ef7-aac0-426f-a391-d50a06868dee-scripts" (OuterVolumeSpecName: "scripts") pod "a5762ef7-aac0-426f-a391-d50a06868dee" (UID: "a5762ef7-aac0-426f-a391-d50a06868dee"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.693673 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5762ef7-aac0-426f-a391-d50a06868dee-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.693716 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdhks\" (UniqueName: \"kubernetes.io/projected/a5762ef7-aac0-426f-a391-d50a06868dee-kube-api-access-rdhks\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.693729 4822 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a5762ef7-aac0-426f-a391-d50a06868dee-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.693738 4822 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a5762ef7-aac0-426f-a391-d50a06868dee-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.693771 4822 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.693781 4822 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.693791 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5762ef7-aac0-426f-a391-d50a06868dee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.989604 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-mvk28" Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.989731 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mvk28" event={"ID":"a5762ef7-aac0-426f-a391-d50a06868dee","Type":"ContainerDied","Data":"1da6c85d219156a05b3822d9346dff966fd0fde319d53be9c7faf4be10ac622e"} Dec 01 07:10:13 crc kubenswrapper[4822]: I1201 07:10:13.990063 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1da6c85d219156a05b3822d9346dff966fd0fde319d53be9c7faf4be10ac622e" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.611363 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.711595 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-run\") pod \"30a3a972-051f-455e-b6a0-892d0b2975ef\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.711696 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/30a3a972-051f-455e-b6a0-892d0b2975ef-additional-scripts\") pod \"30a3a972-051f-455e-b6a0-892d0b2975ef\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.711800 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30a3a972-051f-455e-b6a0-892d0b2975ef-scripts\") pod \"30a3a972-051f-455e-b6a0-892d0b2975ef\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.711960 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-run-ovn\") pod \"30a3a972-051f-455e-b6a0-892d0b2975ef\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.712018 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmbvv\" (UniqueName: \"kubernetes.io/projected/30a3a972-051f-455e-b6a0-892d0b2975ef-kube-api-access-fmbvv\") pod \"30a3a972-051f-455e-b6a0-892d0b2975ef\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.712093 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-log-ovn\") pod \"30a3a972-051f-455e-b6a0-892d0b2975ef\" (UID: \"30a3a972-051f-455e-b6a0-892d0b2975ef\") " Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.712216 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-run" (OuterVolumeSpecName: "var-run") pod "30a3a972-051f-455e-b6a0-892d0b2975ef" (UID: "30a3a972-051f-455e-b6a0-892d0b2975ef"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.712336 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "30a3a972-051f-455e-b6a0-892d0b2975ef" (UID: "30a3a972-051f-455e-b6a0-892d0b2975ef"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.712454 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "30a3a972-051f-455e-b6a0-892d0b2975ef" (UID: "30a3a972-051f-455e-b6a0-892d0b2975ef"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.712756 4822 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.712828 4822 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-run\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.712908 4822 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/30a3a972-051f-455e-b6a0-892d0b2975ef-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.713837 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30a3a972-051f-455e-b6a0-892d0b2975ef-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "30a3a972-051f-455e-b6a0-892d0b2975ef" (UID: "30a3a972-051f-455e-b6a0-892d0b2975ef"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.715367 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30a3a972-051f-455e-b6a0-892d0b2975ef-scripts" (OuterVolumeSpecName: "scripts") pod "30a3a972-051f-455e-b6a0-892d0b2975ef" (UID: "30a3a972-051f-455e-b6a0-892d0b2975ef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.727728 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30a3a972-051f-455e-b6a0-892d0b2975ef-kube-api-access-fmbvv" (OuterVolumeSpecName: "kube-api-access-fmbvv") pod "30a3a972-051f-455e-b6a0-892d0b2975ef" (UID: "30a3a972-051f-455e-b6a0-892d0b2975ef"). InnerVolumeSpecName "kube-api-access-fmbvv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.729898 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2t96f" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.738246 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-ef00-account-create-update-h4cql" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.814634 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzftd\" (UniqueName: \"kubernetes.io/projected/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a-kube-api-access-kzftd\") pod \"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a\" (UID: \"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a\") " Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.814799 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a-operator-scripts\") pod \"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a\" (UID: \"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a\") " Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.814873 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brqtl\" (UniqueName: \"kubernetes.io/projected/a630d16e-8baf-4fbd-851d-ae5c49baf062-kube-api-access-brqtl\") pod \"a630d16e-8baf-4fbd-851d-ae5c49baf062\" (UID: \"a630d16e-8baf-4fbd-851d-ae5c49baf062\") " Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.814948 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a630d16e-8baf-4fbd-851d-ae5c49baf062-operator-scripts\") pod \"a630d16e-8baf-4fbd-851d-ae5c49baf062\" (UID: \"a630d16e-8baf-4fbd-851d-ae5c49baf062\") " Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.815428 4822 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/30a3a972-051f-455e-b6a0-892d0b2975ef-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.815443 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30a3a972-051f-455e-b6a0-892d0b2975ef-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.815466 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmbvv\" (UniqueName: \"kubernetes.io/projected/30a3a972-051f-455e-b6a0-892d0b2975ef-kube-api-access-fmbvv\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.815718 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e7060e1b-a0a1-4403-a0f3-6fb1ffff308a" (UID: "e7060e1b-a0a1-4403-a0f3-6fb1ffff308a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.815757 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a630d16e-8baf-4fbd-851d-ae5c49baf062-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a630d16e-8baf-4fbd-851d-ae5c49baf062" (UID: "a630d16e-8baf-4fbd-851d-ae5c49baf062"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.818292 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a-kube-api-access-kzftd" (OuterVolumeSpecName: "kube-api-access-kzftd") pod "e7060e1b-a0a1-4403-a0f3-6fb1ffff308a" (UID: "e7060e1b-a0a1-4403-a0f3-6fb1ffff308a"). InnerVolumeSpecName "kube-api-access-kzftd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.819480 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a630d16e-8baf-4fbd-851d-ae5c49baf062-kube-api-access-brqtl" (OuterVolumeSpecName: "kube-api-access-brqtl") pod "a630d16e-8baf-4fbd-851d-ae5c49baf062" (UID: "a630d16e-8baf-4fbd-851d-ae5c49baf062"). InnerVolumeSpecName "kube-api-access-brqtl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.918640 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzftd\" (UniqueName: \"kubernetes.io/projected/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a-kube-api-access-kzftd\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.918706 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.918722 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brqtl\" (UniqueName: \"kubernetes.io/projected/a630d16e-8baf-4fbd-851d-ae5c49baf062-kube-api-access-brqtl\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:14 crc kubenswrapper[4822]: I1201 07:10:14.918735 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a630d16e-8baf-4fbd-851d-ae5c49baf062-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:15 crc kubenswrapper[4822]: I1201 07:10:15.014926 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2t96f" event={"ID":"e7060e1b-a0a1-4403-a0f3-6fb1ffff308a","Type":"ContainerDied","Data":"d64dd407d12facd7d23438d6840579e666fced5fb43ec6e594e54553db98b660"} Dec 01 07:10:15 crc kubenswrapper[4822]: I1201 07:10:15.014962 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d64dd407d12facd7d23438d6840579e666fced5fb43ec6e594e54553db98b660" Dec 01 07:10:15 crc kubenswrapper[4822]: I1201 07:10:15.015020 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2t96f" Dec 01 07:10:15 crc kubenswrapper[4822]: I1201 07:10:15.023069 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-ef00-account-create-update-h4cql" event={"ID":"a630d16e-8baf-4fbd-851d-ae5c49baf062","Type":"ContainerDied","Data":"7fa68a27e7ab05e6368fcb7af3655150964b3db9d5839de2c24c6cd24c20145e"} Dec 01 07:10:15 crc kubenswrapper[4822]: I1201 07:10:15.023097 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fa68a27e7ab05e6368fcb7af3655150964b3db9d5839de2c24c6cd24c20145e" Dec 01 07:10:15 crc kubenswrapper[4822]: I1201 07:10:15.023154 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-ef00-account-create-update-h4cql" Dec 01 07:10:15 crc kubenswrapper[4822]: I1201 07:10:15.028892 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-q78vl-config-4qr5t" event={"ID":"30a3a972-051f-455e-b6a0-892d0b2975ef","Type":"ContainerDied","Data":"ec022f12f1f67e088d61c052773f500f7aa31eb20a9f01a54cda64df02ae1eed"} Dec 01 07:10:15 crc kubenswrapper[4822]: I1201 07:10:15.028928 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec022f12f1f67e088d61c052773f500f7aa31eb20a9f01a54cda64df02ae1eed" Dec 01 07:10:15 crc kubenswrapper[4822]: I1201 07:10:15.029000 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-q78vl-config-4qr5t" Dec 01 07:10:15 crc kubenswrapper[4822]: I1201 07:10:15.458683 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-q78vl" Dec 01 07:10:15 crc kubenswrapper[4822]: I1201 07:10:15.733445 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-q78vl-config-4qr5t"] Dec 01 07:10:15 crc kubenswrapper[4822]: I1201 07:10:15.740281 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-q78vl-config-4qr5t"] Dec 01 07:10:16 crc kubenswrapper[4822]: I1201 07:10:16.967688 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30a3a972-051f-455e-b6a0-892d0b2975ef" path="/var/lib/kubelet/pods/30a3a972-051f-455e-b6a0-892d0b2975ef/volumes" Dec 01 07:10:26 crc kubenswrapper[4822]: I1201 07:10:26.374615 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0" Dec 01 07:10:26 crc kubenswrapper[4822]: I1201 07:10:26.383340 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift\") pod \"swift-storage-0\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " pod="openstack/swift-storage-0" Dec 01 07:10:26 crc kubenswrapper[4822]: I1201 07:10:26.615032 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 01 07:10:28 crc kubenswrapper[4822]: I1201 07:10:28.274788 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:10:28 crc kubenswrapper[4822]: I1201 07:10:28.395066 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 01 07:10:29 crc kubenswrapper[4822]: I1201 07:10:29.711415 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 01 07:10:29 crc kubenswrapper[4822]: W1201 07:10:29.726318 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb21643f1_db8c_4613_ac29_f1d4d0970b7b.slice/crio-65ccad1bf4c8dff4313c68c60a071fe45b41c30286d603997e9e03dc886f8779 WatchSource:0}: Error finding container 65ccad1bf4c8dff4313c68c60a071fe45b41c30286d603997e9e03dc886f8779: Status 404 returned error can't find the container with id 65ccad1bf4c8dff4313c68c60a071fe45b41c30286d603997e9e03dc886f8779 Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.200210 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"65ccad1bf4c8dff4313c68c60a071fe45b41c30286d603997e9e03dc886f8779"} Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.413058 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-cxrh8"] Dec 01 07:10:30 crc kubenswrapper[4822]: E1201 07:10:30.417455 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a630d16e-8baf-4fbd-851d-ae5c49baf062" containerName="mariadb-account-create-update" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.417625 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a630d16e-8baf-4fbd-851d-ae5c49baf062" containerName="mariadb-account-create-update" Dec 01 07:10:30 crc kubenswrapper[4822]: E1201 07:10:30.417726 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5762ef7-aac0-426f-a391-d50a06868dee" containerName="swift-ring-rebalance" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.417811 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5762ef7-aac0-426f-a391-d50a06868dee" containerName="swift-ring-rebalance" Dec 01 07:10:30 crc kubenswrapper[4822]: E1201 07:10:30.417893 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30a3a972-051f-455e-b6a0-892d0b2975ef" containerName="ovn-config" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.417983 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="30a3a972-051f-455e-b6a0-892d0b2975ef" containerName="ovn-config" Dec 01 07:10:30 crc kubenswrapper[4822]: E1201 07:10:30.418064 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7060e1b-a0a1-4403-a0f3-6fb1ffff308a" containerName="mariadb-database-create" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.418141 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7060e1b-a0a1-4403-a0f3-6fb1ffff308a" containerName="mariadb-database-create" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.418424 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a630d16e-8baf-4fbd-851d-ae5c49baf062" containerName="mariadb-account-create-update" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.418564 4822 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="a5762ef7-aac0-426f-a391-d50a06868dee" containerName="swift-ring-rebalance" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.418669 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7060e1b-a0a1-4403-a0f3-6fb1ffff308a" containerName="mariadb-database-create" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.418755 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="30a3a972-051f-455e-b6a0-892d0b2975ef" containerName="ovn-config" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.419566 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-cxrh8" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.433075 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-cxrh8"] Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.511866 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-6dhf2"] Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.513716 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6dhf2" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.531879 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6dhf2"] Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.541471 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-6e4d-account-create-update-vz22v"] Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.542434 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6e4d-account-create-update-vz22v" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.544244 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcr5f\" (UniqueName: \"kubernetes.io/projected/246a1687-b876-4135-9a78-5aea28bd9663-kube-api-access-jcr5f\") pod \"cinder-db-create-cxrh8\" (UID: \"246a1687-b876-4135-9a78-5aea28bd9663\") " pod="openstack/cinder-db-create-cxrh8" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.544361 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/246a1687-b876-4135-9a78-5aea28bd9663-operator-scripts\") pod \"cinder-db-create-cxrh8\" (UID: \"246a1687-b876-4135-9a78-5aea28bd9663\") " pod="openstack/cinder-db-create-cxrh8" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.547747 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.586100 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6e4d-account-create-update-vz22v"] Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.645453 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/246a1687-b876-4135-9a78-5aea28bd9663-operator-scripts\") pod \"cinder-db-create-cxrh8\" (UID: \"246a1687-b876-4135-9a78-5aea28bd9663\") " pod="openstack/cinder-db-create-cxrh8" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.645612 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcr5f\" (UniqueName: \"kubernetes.io/projected/246a1687-b876-4135-9a78-5aea28bd9663-kube-api-access-jcr5f\") pod \"cinder-db-create-cxrh8\" (UID: 
\"246a1687-b876-4135-9a78-5aea28bd9663\") " pod="openstack/cinder-db-create-cxrh8" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.645640 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhpsc\" (UniqueName: \"kubernetes.io/projected/61ff107a-0e1c-4c16-b842-7ee341347c9e-kube-api-access-dhpsc\") pod \"barbican-db-create-6dhf2\" (UID: \"61ff107a-0e1c-4c16-b842-7ee341347c9e\") " pod="openstack/barbican-db-create-6dhf2" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.645666 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a38c501-78bf-41cc-8caa-cf049fe4821d-operator-scripts\") pod \"barbican-6e4d-account-create-update-vz22v\" (UID: \"4a38c501-78bf-41cc-8caa-cf049fe4821d\") " pod="openstack/barbican-6e4d-account-create-update-vz22v" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.645684 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61ff107a-0e1c-4c16-b842-7ee341347c9e-operator-scripts\") pod \"barbican-db-create-6dhf2\" (UID: \"61ff107a-0e1c-4c16-b842-7ee341347c9e\") " pod="openstack/barbican-db-create-6dhf2" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.645704 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25zrr\" (UniqueName: \"kubernetes.io/projected/4a38c501-78bf-41cc-8caa-cf049fe4821d-kube-api-access-25zrr\") pod \"barbican-6e4d-account-create-update-vz22v\" (UID: \"4a38c501-78bf-41cc-8caa-cf049fe4821d\") " pod="openstack/barbican-6e4d-account-create-update-vz22v" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.646475 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/246a1687-b876-4135-9a78-5aea28bd9663-operator-scripts\") pod \"cinder-db-create-cxrh8\" (UID: \"246a1687-b876-4135-9a78-5aea28bd9663\") " pod="openstack/cinder-db-create-cxrh8" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.678366 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcr5f\" (UniqueName: \"kubernetes.io/projected/246a1687-b876-4135-9a78-5aea28bd9663-kube-api-access-jcr5f\") pod \"cinder-db-create-cxrh8\" (UID: \"246a1687-b876-4135-9a78-5aea28bd9663\") " pod="openstack/cinder-db-create-cxrh8" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.739336 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-tdc4c"] Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.740619 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-tdc4c" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.746951 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61ff107a-0e1c-4c16-b842-7ee341347c9e-operator-scripts\") pod \"barbican-db-create-6dhf2\" (UID: \"61ff107a-0e1c-4c16-b842-7ee341347c9e\") " pod="openstack/barbican-db-create-6dhf2" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.746998 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25zrr\" (UniqueName: \"kubernetes.io/projected/4a38c501-78bf-41cc-8caa-cf049fe4821d-kube-api-access-25zrr\") pod \"barbican-6e4d-account-create-update-vz22v\" (UID: \"4a38c501-78bf-41cc-8caa-cf049fe4821d\") " pod="openstack/barbican-6e4d-account-create-update-vz22v" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.747112 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhpsc\" (UniqueName: \"kubernetes.io/projected/61ff107a-0e1c-4c16-b842-7ee341347c9e-kube-api-access-dhpsc\") pod \"barbican-db-create-6dhf2\" (UID: \"61ff107a-0e1c-4c16-b842-7ee341347c9e\") " pod="openstack/barbican-db-create-6dhf2" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.747134 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a38c501-78bf-41cc-8caa-cf049fe4821d-operator-scripts\") pod \"barbican-6e4d-account-create-update-vz22v\" (UID: \"4a38c501-78bf-41cc-8caa-cf049fe4821d\") " pod="openstack/barbican-6e4d-account-create-update-vz22v" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.747645 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61ff107a-0e1c-4c16-b842-7ee341347c9e-operator-scripts\") pod \"barbican-db-create-6dhf2\" (UID: \"61ff107a-0e1c-4c16-b842-7ee341347c9e\") " pod="openstack/barbican-db-create-6dhf2" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.747789 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a38c501-78bf-41cc-8caa-cf049fe4821d-operator-scripts\") pod \"barbican-6e4d-account-create-update-vz22v\" (UID: \"4a38c501-78bf-41cc-8caa-cf049fe4821d\") " pod="openstack/barbican-6e4d-account-create-update-vz22v" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.764831 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-tdc4c"] Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.775880 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-cxrh8" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.800114 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25zrr\" (UniqueName: \"kubernetes.io/projected/4a38c501-78bf-41cc-8caa-cf049fe4821d-kube-api-access-25zrr\") pod \"barbican-6e4d-account-create-update-vz22v\" (UID: \"4a38c501-78bf-41cc-8caa-cf049fe4821d\") " pod="openstack/barbican-6e4d-account-create-update-vz22v" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.818320 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhpsc\" (UniqueName: \"kubernetes.io/projected/61ff107a-0e1c-4c16-b842-7ee341347c9e-kube-api-access-dhpsc\") pod \"barbican-db-create-6dhf2\" (UID: \"61ff107a-0e1c-4c16-b842-7ee341347c9e\") " pod="openstack/barbican-db-create-6dhf2" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.839262 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6dhf2" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.848923 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-ac9b-account-create-update-khj8h"] Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.850205 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-ac9b-account-create-update-khj8h" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.851743 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82sks\" (UniqueName: \"kubernetes.io/projected/be2b75bc-27b6-4e33-8f17-0c30b4512014-kube-api-access-82sks\") pod \"neutron-db-create-tdc4c\" (UID: \"be2b75bc-27b6-4e33-8f17-0c30b4512014\") " pod="openstack/neutron-db-create-tdc4c" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.851831 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be2b75bc-27b6-4e33-8f17-0c30b4512014-operator-scripts\") pod \"neutron-db-create-tdc4c\" (UID: \"be2b75bc-27b6-4e33-8f17-0c30b4512014\") " pod="openstack/neutron-db-create-tdc4c" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.856846 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.868360 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6e4d-account-create-update-vz22v" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.884924 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-ac9b-account-create-update-khj8h"] Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.924789 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-n4pm7"] Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.925880 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-n4pm7" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.929884 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-dcwzn" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.930090 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.930093 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.930431 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.938613 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-40f1-account-create-update-frwxk"] Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.939801 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-40f1-account-create-update-frwxk" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.942789 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.954490 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82sks\" (UniqueName: \"kubernetes.io/projected/be2b75bc-27b6-4e33-8f17-0c30b4512014-kube-api-access-82sks\") pod \"neutron-db-create-tdc4c\" (UID: \"be2b75bc-27b6-4e33-8f17-0c30b4512014\") " pod="openstack/neutron-db-create-tdc4c" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.956638 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be2b75bc-27b6-4e33-8f17-0c30b4512014-operator-scripts\") pod \"neutron-db-create-tdc4c\" (UID: \"be2b75bc-27b6-4e33-8f17-0c30b4512014\") " pod="openstack/neutron-db-create-tdc4c" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.956702 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c1e0d17-7093-464d-ae8f-6b483a28558b-operator-scripts\") pod \"cinder-ac9b-account-create-update-khj8h\" (UID: \"1c1e0d17-7093-464d-ae8f-6b483a28558b\") " pod="openstack/cinder-ac9b-account-create-update-khj8h" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.956810 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6kqr\" (UniqueName: \"kubernetes.io/projected/1c1e0d17-7093-464d-ae8f-6b483a28558b-kube-api-access-c6kqr\") pod \"cinder-ac9b-account-create-update-khj8h\" (UID: \"1c1e0d17-7093-464d-ae8f-6b483a28558b\") " pod="openstack/cinder-ac9b-account-create-update-khj8h" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.957815 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be2b75bc-27b6-4e33-8f17-0c30b4512014-operator-scripts\") pod \"neutron-db-create-tdc4c\" (UID: \"be2b75bc-27b6-4e33-8f17-0c30b4512014\") " pod="openstack/neutron-db-create-tdc4c" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.983013 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82sks\" (UniqueName: \"kubernetes.io/projected/be2b75bc-27b6-4e33-8f17-0c30b4512014-kube-api-access-82sks\") pod 
\"neutron-db-create-tdc4c\" (UID: \"be2b75bc-27b6-4e33-8f17-0c30b4512014\") " pod="openstack/neutron-db-create-tdc4c" Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.995642 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-40f1-account-create-update-frwxk"] Dec 01 07:10:30 crc kubenswrapper[4822]: I1201 07:10:30.995861 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-n4pm7"] Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.059676 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-tdc4c" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.060177 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-combined-ca-bundle\") pod \"keystone-db-sync-n4pm7\" (UID: \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\") " pod="openstack/keystone-db-sync-n4pm7" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.060259 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-config-data\") pod \"keystone-db-sync-n4pm7\" (UID: \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\") " pod="openstack/keystone-db-sync-n4pm7" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.060337 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c1e0d17-7093-464d-ae8f-6b483a28558b-operator-scripts\") pod \"cinder-ac9b-account-create-update-khj8h\" (UID: \"1c1e0d17-7093-464d-ae8f-6b483a28558b\") " pod="openstack/cinder-ac9b-account-create-update-khj8h" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.060377 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lsp7\" (UniqueName: \"kubernetes.io/projected/3f69aa7d-6165-42fb-a44e-1c0a25207b7c-kube-api-access-2lsp7\") pod \"neutron-40f1-account-create-update-frwxk\" (UID: \"3f69aa7d-6165-42fb-a44e-1c0a25207b7c\") " pod="openstack/neutron-40f1-account-create-update-frwxk" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.060464 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwrct\" (UniqueName: \"kubernetes.io/projected/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-kube-api-access-fwrct\") pod \"keystone-db-sync-n4pm7\" (UID: \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\") " pod="openstack/keystone-db-sync-n4pm7" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.060503 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f69aa7d-6165-42fb-a44e-1c0a25207b7c-operator-scripts\") pod \"neutron-40f1-account-create-update-frwxk\" (UID: \"3f69aa7d-6165-42fb-a44e-1c0a25207b7c\") " pod="openstack/neutron-40f1-account-create-update-frwxk" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.060561 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6kqr\" (UniqueName: \"kubernetes.io/projected/1c1e0d17-7093-464d-ae8f-6b483a28558b-kube-api-access-c6kqr\") pod \"cinder-ac9b-account-create-update-khj8h\" (UID: \"1c1e0d17-7093-464d-ae8f-6b483a28558b\") " pod="openstack/cinder-ac9b-account-create-update-khj8h" Dec 
01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.061123 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c1e0d17-7093-464d-ae8f-6b483a28558b-operator-scripts\") pod \"cinder-ac9b-account-create-update-khj8h\" (UID: \"1c1e0d17-7093-464d-ae8f-6b483a28558b\") " pod="openstack/cinder-ac9b-account-create-update-khj8h" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.081015 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6kqr\" (UniqueName: \"kubernetes.io/projected/1c1e0d17-7093-464d-ae8f-6b483a28558b-kube-api-access-c6kqr\") pod \"cinder-ac9b-account-create-update-khj8h\" (UID: \"1c1e0d17-7093-464d-ae8f-6b483a28558b\") " pod="openstack/cinder-ac9b-account-create-update-khj8h" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.162603 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-combined-ca-bundle\") pod \"keystone-db-sync-n4pm7\" (UID: \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\") " pod="openstack/keystone-db-sync-n4pm7" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.162685 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-config-data\") pod \"keystone-db-sync-n4pm7\" (UID: \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\") " pod="openstack/keystone-db-sync-n4pm7" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.162719 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lsp7\" (UniqueName: \"kubernetes.io/projected/3f69aa7d-6165-42fb-a44e-1c0a25207b7c-kube-api-access-2lsp7\") pod \"neutron-40f1-account-create-update-frwxk\" (UID: \"3f69aa7d-6165-42fb-a44e-1c0a25207b7c\") " pod="openstack/neutron-40f1-account-create-update-frwxk" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.162746 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwrct\" (UniqueName: \"kubernetes.io/projected/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-kube-api-access-fwrct\") pod \"keystone-db-sync-n4pm7\" (UID: \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\") " pod="openstack/keystone-db-sync-n4pm7" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.162768 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f69aa7d-6165-42fb-a44e-1c0a25207b7c-operator-scripts\") pod \"neutron-40f1-account-create-update-frwxk\" (UID: \"3f69aa7d-6165-42fb-a44e-1c0a25207b7c\") " pod="openstack/neutron-40f1-account-create-update-frwxk" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.163828 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f69aa7d-6165-42fb-a44e-1c0a25207b7c-operator-scripts\") pod \"neutron-40f1-account-create-update-frwxk\" (UID: \"3f69aa7d-6165-42fb-a44e-1c0a25207b7c\") " pod="openstack/neutron-40f1-account-create-update-frwxk" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.167718 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-config-data\") pod \"keystone-db-sync-n4pm7\" (UID: \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\") " 
pod="openstack/keystone-db-sync-n4pm7" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.168579 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-combined-ca-bundle\") pod \"keystone-db-sync-n4pm7\" (UID: \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\") " pod="openstack/keystone-db-sync-n4pm7" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.180950 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lsp7\" (UniqueName: \"kubernetes.io/projected/3f69aa7d-6165-42fb-a44e-1c0a25207b7c-kube-api-access-2lsp7\") pod \"neutron-40f1-account-create-update-frwxk\" (UID: \"3f69aa7d-6165-42fb-a44e-1c0a25207b7c\") " pod="openstack/neutron-40f1-account-create-update-frwxk" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.186068 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwrct\" (UniqueName: \"kubernetes.io/projected/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-kube-api-access-fwrct\") pod \"keystone-db-sync-n4pm7\" (UID: \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\") " pod="openstack/keystone-db-sync-n4pm7" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.252676 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-ac9b-account-create-update-khj8h" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.277691 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-h9ttd" event={"ID":"3f992a05-1279-4c84-b09b-977a6b1e4ea8","Type":"ContainerStarted","Data":"75e7995266bdbf6492daa619b0f5223c1c2481c8944e162cc407d65a37b501f6"} Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.317218 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-h9ttd" podStartSLOduration=3.465935156 podStartE2EDuration="20.317185518s" podCreationTimestamp="2025-12-01 07:10:11 +0000 UTC" firstStartedPulling="2025-12-01 07:10:12.856661769 +0000 UTC m=+1168.177469455" lastFinishedPulling="2025-12-01 07:10:29.707912131 +0000 UTC m=+1185.028719817" observedRunningTime="2025-12-01 07:10:31.314242785 +0000 UTC m=+1186.635050551" watchObservedRunningTime="2025-12-01 07:10:31.317185518 +0000 UTC m=+1186.637993204" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.330405 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-n4pm7" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.342255 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-40f1-account-create-update-frwxk" Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.366158 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-cxrh8"] Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.383411 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6e4d-account-create-update-vz22v"] Dec 01 07:10:31 crc kubenswrapper[4822]: W1201 07:10:31.419085 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a38c501_78bf_41cc_8caa_cf049fe4821d.slice/crio-5a0919ee0579dd2af9ec38643eb422bdce5a682aee151ac7d8fb83739980cc6f WatchSource:0}: Error finding container 5a0919ee0579dd2af9ec38643eb422bdce5a682aee151ac7d8fb83739980cc6f: Status 404 returned error can't find the container with id 5a0919ee0579dd2af9ec38643eb422bdce5a682aee151ac7d8fb83739980cc6f Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.758182 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6dhf2"] Dec 01 07:10:31 crc kubenswrapper[4822]: W1201 07:10:31.778670 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61ff107a_0e1c_4c16_b842_7ee341347c9e.slice/crio-842bb467acae2a51ee1f5bf8eff18b4f2c8396c491f1f057a9b20da78e222f48 WatchSource:0}: Error finding container 842bb467acae2a51ee1f5bf8eff18b4f2c8396c491f1f057a9b20da78e222f48: Status 404 returned error can't find the container with id 842bb467acae2a51ee1f5bf8eff18b4f2c8396c491f1f057a9b20da78e222f48 Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.801283 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-tdc4c"] Dec 01 07:10:31 crc kubenswrapper[4822]: W1201 07:10:31.817474 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe2b75bc_27b6_4e33_8f17_0c30b4512014.slice/crio-86eb851f446e9bec48e7f91959302fbaf011d45f4be1473034a4e4e2533fe284 WatchSource:0}: Error finding container 86eb851f446e9bec48e7f91959302fbaf011d45f4be1473034a4e4e2533fe284: Status 404 returned error can't find the container with id 86eb851f446e9bec48e7f91959302fbaf011d45f4be1473034a4e4e2533fe284 Dec 01 07:10:31 crc kubenswrapper[4822]: I1201 07:10:31.958695 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-ac9b-account-create-update-khj8h"] Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.052422 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-n4pm7"] Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.061662 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-40f1-account-create-update-frwxk"] Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.286978 4822 generic.go:334] "Generic (PLEG): container finished" podID="4a38c501-78bf-41cc-8caa-cf049fe4821d" containerID="c4e609b36f87f6435791314d2a1c34cae79c7cc179b73cee36977247f8022d6b" exitCode=0 Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.287336 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6e4d-account-create-update-vz22v" event={"ID":"4a38c501-78bf-41cc-8caa-cf049fe4821d","Type":"ContainerDied","Data":"c4e609b36f87f6435791314d2a1c34cae79c7cc179b73cee36977247f8022d6b"} Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.287383 4822 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/barbican-6e4d-account-create-update-vz22v" event={"ID":"4a38c501-78bf-41cc-8caa-cf049fe4821d","Type":"ContainerStarted","Data":"5a0919ee0579dd2af9ec38643eb422bdce5a682aee151ac7d8fb83739980cc6f"} Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.290339 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-tdc4c" event={"ID":"be2b75bc-27b6-4e33-8f17-0c30b4512014","Type":"ContainerStarted","Data":"4897f922706c50863ab31b9b5bb98546c2f8892b8567054eb68d14885496f350"} Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.290362 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-tdc4c" event={"ID":"be2b75bc-27b6-4e33-8f17-0c30b4512014","Type":"ContainerStarted","Data":"86eb851f446e9bec48e7f91959302fbaf011d45f4be1473034a4e4e2533fe284"} Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.296716 4822 generic.go:334] "Generic (PLEG): container finished" podID="246a1687-b876-4135-9a78-5aea28bd9663" containerID="d36d158bda666c23ed6022000dfa5fbf2867d83d065c03a9293477def80aa751" exitCode=0 Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.296771 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cxrh8" event={"ID":"246a1687-b876-4135-9a78-5aea28bd9663","Type":"ContainerDied","Data":"d36d158bda666c23ed6022000dfa5fbf2867d83d065c03a9293477def80aa751"} Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.296792 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cxrh8" event={"ID":"246a1687-b876-4135-9a78-5aea28bd9663","Type":"ContainerStarted","Data":"e9062138416be1a6ca250e5e75bb8ca848889e121e9a428e3cd604a31e5c8df9"} Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.303387 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6dhf2" event={"ID":"61ff107a-0e1c-4c16-b842-7ee341347c9e","Type":"ContainerStarted","Data":"6b87f7e8236030d3084911733ac4af53b4d1e9f9c4db1326fa5769ed7e4e91e1"} Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.303421 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6dhf2" event={"ID":"61ff107a-0e1c-4c16-b842-7ee341347c9e","Type":"ContainerStarted","Data":"842bb467acae2a51ee1f5bf8eff18b4f2c8396c491f1f057a9b20da78e222f48"} Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.325802 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-tdc4c" podStartSLOduration=2.32578713 podStartE2EDuration="2.32578713s" podCreationTimestamp="2025-12-01 07:10:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:10:32.317543769 +0000 UTC m=+1187.638351445" watchObservedRunningTime="2025-12-01 07:10:32.32578713 +0000 UTC m=+1187.646594816" Dec 01 07:10:32 crc kubenswrapper[4822]: I1201 07:10:32.360107 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-6dhf2" podStartSLOduration=2.360090684 podStartE2EDuration="2.360090684s" podCreationTimestamp="2025-12-01 07:10:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:10:32.355068003 +0000 UTC m=+1187.675875689" watchObservedRunningTime="2025-12-01 07:10:32.360090684 +0000 UTC m=+1187.680898370" Dec 01 07:10:32 crc kubenswrapper[4822]: W1201 07:10:32.536402 4822 manager.go:1169] 
Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c1e0d17_7093_464d_ae8f_6b483a28558b.slice/crio-22c9b6550e22a998c4c5df5b8b74d5c0c028c1a7798d789dcd69cef6b31b25fb WatchSource:0}: Error finding container 22c9b6550e22a998c4c5df5b8b74d5c0c028c1a7798d789dcd69cef6b31b25fb: Status 404 returned error can't find the container with id 22c9b6550e22a998c4c5df5b8b74d5c0c028c1a7798d789dcd69cef6b31b25fb Dec 01 07:10:32 crc kubenswrapper[4822]: W1201 07:10:32.539372 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3aa3420d_c277_4e32_be0b_4e7c4b7a7f76.slice/crio-1be2cd0ffb8ae0e83d373c1b1cf876f19effa9892bb0d70faee583bf40c8a24a WatchSource:0}: Error finding container 1be2cd0ffb8ae0e83d373c1b1cf876f19effa9892bb0d70faee583bf40c8a24a: Status 404 returned error can't find the container with id 1be2cd0ffb8ae0e83d373c1b1cf876f19effa9892bb0d70faee583bf40c8a24a Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.315944 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-40f1-account-create-update-frwxk" event={"ID":"3f69aa7d-6165-42fb-a44e-1c0a25207b7c","Type":"ContainerStarted","Data":"bbb74df7e6125aca092a6af07124c2d09a96366bfe4d10cb7f87a04cced59681"} Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.316207 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-40f1-account-create-update-frwxk" event={"ID":"3f69aa7d-6165-42fb-a44e-1c0a25207b7c","Type":"ContainerStarted","Data":"b2b5f37112494841825417465a1c3c2e69aabff5c5f8a1a6017aca619ab1955a"} Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.319560 4822 generic.go:334] "Generic (PLEG): container finished" podID="61ff107a-0e1c-4c16-b842-7ee341347c9e" containerID="6b87f7e8236030d3084911733ac4af53b4d1e9f9c4db1326fa5769ed7e4e91e1" exitCode=0 Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.319647 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6dhf2" event={"ID":"61ff107a-0e1c-4c16-b842-7ee341347c9e","Type":"ContainerDied","Data":"6b87f7e8236030d3084911733ac4af53b4d1e9f9c4db1326fa5769ed7e4e91e1"} Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.324072 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-n4pm7" event={"ID":"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76","Type":"ContainerStarted","Data":"1be2cd0ffb8ae0e83d373c1b1cf876f19effa9892bb0d70faee583bf40c8a24a"} Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.334118 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8"} Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.334201 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3"} Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.336168 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-40f1-account-create-update-frwxk" podStartSLOduration=3.336143272 podStartE2EDuration="3.336143272s" podCreationTimestamp="2025-12-01 07:10:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-01 07:10:33.331636375 +0000 UTC m=+1188.652444071" watchObservedRunningTime="2025-12-01 07:10:33.336143272 +0000 UTC m=+1188.656950958" Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.336752 4822 generic.go:334] "Generic (PLEG): container finished" podID="be2b75bc-27b6-4e33-8f17-0c30b4512014" containerID="4897f922706c50863ab31b9b5bb98546c2f8892b8567054eb68d14885496f350" exitCode=0 Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.336814 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-tdc4c" event={"ID":"be2b75bc-27b6-4e33-8f17-0c30b4512014","Type":"ContainerDied","Data":"4897f922706c50863ab31b9b5bb98546c2f8892b8567054eb68d14885496f350"} Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.337969 4822 generic.go:334] "Generic (PLEG): container finished" podID="1c1e0d17-7093-464d-ae8f-6b483a28558b" containerID="fe6640858936a836864b4bbca0ccc0aebc36c4ab31fea176ffa9a0222c6ad393" exitCode=0 Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.338168 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ac9b-account-create-update-khj8h" event={"ID":"1c1e0d17-7093-464d-ae8f-6b483a28558b","Type":"ContainerDied","Data":"fe6640858936a836864b4bbca0ccc0aebc36c4ab31fea176ffa9a0222c6ad393"} Dec 01 07:10:33 crc kubenswrapper[4822]: I1201 07:10:33.338189 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ac9b-account-create-update-khj8h" event={"ID":"1c1e0d17-7093-464d-ae8f-6b483a28558b","Type":"ContainerStarted","Data":"22c9b6550e22a998c4c5df5b8b74d5c0c028c1a7798d789dcd69cef6b31b25fb"} Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.835007 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6e4d-account-create-update-vz22v" Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.841588 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-cxrh8" Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.848089 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a38c501-78bf-41cc-8caa-cf049fe4821d-operator-scripts\") pod \"4a38c501-78bf-41cc-8caa-cf049fe4821d\" (UID: \"4a38c501-78bf-41cc-8caa-cf049fe4821d\") " Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.848208 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/246a1687-b876-4135-9a78-5aea28bd9663-operator-scripts\") pod \"246a1687-b876-4135-9a78-5aea28bd9663\" (UID: \"246a1687-b876-4135-9a78-5aea28bd9663\") " Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.848309 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcr5f\" (UniqueName: \"kubernetes.io/projected/246a1687-b876-4135-9a78-5aea28bd9663-kube-api-access-jcr5f\") pod \"246a1687-b876-4135-9a78-5aea28bd9663\" (UID: \"246a1687-b876-4135-9a78-5aea28bd9663\") " Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.848482 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25zrr\" (UniqueName: \"kubernetes.io/projected/4a38c501-78bf-41cc-8caa-cf049fe4821d-kube-api-access-25zrr\") pod \"4a38c501-78bf-41cc-8caa-cf049fe4821d\" (UID: \"4a38c501-78bf-41cc-8caa-cf049fe4821d\") " Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.849122 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a38c501-78bf-41cc-8caa-cf049fe4821d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4a38c501-78bf-41cc-8caa-cf049fe4821d" (UID: "4a38c501-78bf-41cc-8caa-cf049fe4821d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.849711 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/246a1687-b876-4135-9a78-5aea28bd9663-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "246a1687-b876-4135-9a78-5aea28bd9663" (UID: "246a1687-b876-4135-9a78-5aea28bd9663"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.856583 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a38c501-78bf-41cc-8caa-cf049fe4821d-kube-api-access-25zrr" (OuterVolumeSpecName: "kube-api-access-25zrr") pod "4a38c501-78bf-41cc-8caa-cf049fe4821d" (UID: "4a38c501-78bf-41cc-8caa-cf049fe4821d"). InnerVolumeSpecName "kube-api-access-25zrr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.865767 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/246a1687-b876-4135-9a78-5aea28bd9663-kube-api-access-jcr5f" (OuterVolumeSpecName: "kube-api-access-jcr5f") pod "246a1687-b876-4135-9a78-5aea28bd9663" (UID: "246a1687-b876-4135-9a78-5aea28bd9663"). InnerVolumeSpecName "kube-api-access-jcr5f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.952906 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25zrr\" (UniqueName: \"kubernetes.io/projected/4a38c501-78bf-41cc-8caa-cf049fe4821d-kube-api-access-25zrr\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.952953 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a38c501-78bf-41cc-8caa-cf049fe4821d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.952970 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/246a1687-b876-4135-9a78-5aea28bd9663-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:33.952981 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcr5f\" (UniqueName: \"kubernetes.io/projected/246a1687-b876-4135-9a78-5aea28bd9663-kube-api-access-jcr5f\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.372823 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12"} Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.372866 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374"} Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.380559 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cxrh8" event={"ID":"246a1687-b876-4135-9a78-5aea28bd9663","Type":"ContainerDied","Data":"e9062138416be1a6ca250e5e75bb8ca848889e121e9a428e3cd604a31e5c8df9"} Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.380613 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9062138416be1a6ca250e5e75bb8ca848889e121e9a428e3cd604a31e5c8df9" Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.383491 4822 util.go:48] "No ready sandbox for pod can be found. 
Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.387634 4822 generic.go:334] "Generic (PLEG): container finished" podID="3f69aa7d-6165-42fb-a44e-1c0a25207b7c" containerID="bbb74df7e6125aca092a6af07124c2d09a96366bfe4d10cb7f87a04cced59681" exitCode=0
Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.387781 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-40f1-account-create-update-frwxk" event={"ID":"3f69aa7d-6165-42fb-a44e-1c0a25207b7c","Type":"ContainerDied","Data":"bbb74df7e6125aca092a6af07124c2d09a96366bfe4d10cb7f87a04cced59681"}
Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.397205 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6e4d-account-create-update-vz22v" event={"ID":"4a38c501-78bf-41cc-8caa-cf049fe4821d","Type":"ContainerDied","Data":"5a0919ee0579dd2af9ec38643eb422bdce5a682aee151ac7d8fb83739980cc6f"}
Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.397249 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a0919ee0579dd2af9ec38643eb422bdce5a682aee151ac7d8fb83739980cc6f"
Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.397379 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6e4d-account-create-update-vz22v"
Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.801345 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6dhf2"
Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.882741 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhpsc\" (UniqueName: \"kubernetes.io/projected/61ff107a-0e1c-4c16-b842-7ee341347c9e-kube-api-access-dhpsc\") pod \"61ff107a-0e1c-4c16-b842-7ee341347c9e\" (UID: \"61ff107a-0e1c-4c16-b842-7ee341347c9e\") "
Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.882846 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61ff107a-0e1c-4c16-b842-7ee341347c9e-operator-scripts\") pod \"61ff107a-0e1c-4c16-b842-7ee341347c9e\" (UID: \"61ff107a-0e1c-4c16-b842-7ee341347c9e\") "
Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.884254 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61ff107a-0e1c-4c16-b842-7ee341347c9e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "61ff107a-0e1c-4c16-b842-7ee341347c9e" (UID: "61ff107a-0e1c-4c16-b842-7ee341347c9e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.901946 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61ff107a-0e1c-4c16-b842-7ee341347c9e-kube-api-access-dhpsc" (OuterVolumeSpecName: "kube-api-access-dhpsc") pod "61ff107a-0e1c-4c16-b842-7ee341347c9e" (UID: "61ff107a-0e1c-4c16-b842-7ee341347c9e"). InnerVolumeSpecName "kube-api-access-dhpsc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.985996 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61ff107a-0e1c-4c16-b842-7ee341347c9e-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:34 crc kubenswrapper[4822]: I1201 07:10:34.986028 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhpsc\" (UniqueName: \"kubernetes.io/projected/61ff107a-0e1c-4c16-b842-7ee341347c9e-kube-api-access-dhpsc\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.244005 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-tdc4c"
Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.254582 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-ac9b-account-create-update-khj8h"
Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.293184 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82sks\" (UniqueName: \"kubernetes.io/projected/be2b75bc-27b6-4e33-8f17-0c30b4512014-kube-api-access-82sks\") pod \"be2b75bc-27b6-4e33-8f17-0c30b4512014\" (UID: \"be2b75bc-27b6-4e33-8f17-0c30b4512014\") "
Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.293432 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c1e0d17-7093-464d-ae8f-6b483a28558b-operator-scripts\") pod \"1c1e0d17-7093-464d-ae8f-6b483a28558b\" (UID: \"1c1e0d17-7093-464d-ae8f-6b483a28558b\") "
Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.293480 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be2b75bc-27b6-4e33-8f17-0c30b4512014-operator-scripts\") pod \"be2b75bc-27b6-4e33-8f17-0c30b4512014\" (UID: \"be2b75bc-27b6-4e33-8f17-0c30b4512014\") "
Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.293534 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6kqr\" (UniqueName: \"kubernetes.io/projected/1c1e0d17-7093-464d-ae8f-6b483a28558b-kube-api-access-c6kqr\") pod \"1c1e0d17-7093-464d-ae8f-6b483a28558b\" (UID: \"1c1e0d17-7093-464d-ae8f-6b483a28558b\") "
Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.294627 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c1e0d17-7093-464d-ae8f-6b483a28558b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1c1e0d17-7093-464d-ae8f-6b483a28558b" (UID: "1c1e0d17-7093-464d-ae8f-6b483a28558b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.295470 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be2b75bc-27b6-4e33-8f17-0c30b4512014-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "be2b75bc-27b6-4e33-8f17-0c30b4512014" (UID: "be2b75bc-27b6-4e33-8f17-0c30b4512014"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.297580 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c1e0d17-7093-464d-ae8f-6b483a28558b-kube-api-access-c6kqr" (OuterVolumeSpecName: "kube-api-access-c6kqr") pod "1c1e0d17-7093-464d-ae8f-6b483a28558b" (UID: "1c1e0d17-7093-464d-ae8f-6b483a28558b"). InnerVolumeSpecName "kube-api-access-c6kqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.301292 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be2b75bc-27b6-4e33-8f17-0c30b4512014-kube-api-access-82sks" (OuterVolumeSpecName: "kube-api-access-82sks") pod "be2b75bc-27b6-4e33-8f17-0c30b4512014" (UID: "be2b75bc-27b6-4e33-8f17-0c30b4512014"). InnerVolumeSpecName "kube-api-access-82sks". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.398720 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c1e0d17-7093-464d-ae8f-6b483a28558b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.399669 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/be2b75bc-27b6-4e33-8f17-0c30b4512014-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.399760 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6kqr\" (UniqueName: \"kubernetes.io/projected/1c1e0d17-7093-464d-ae8f-6b483a28558b-kube-api-access-c6kqr\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.399840 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82sks\" (UniqueName: \"kubernetes.io/projected/be2b75bc-27b6-4e33-8f17-0c30b4512014-kube-api-access-82sks\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.407352 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6dhf2" event={"ID":"61ff107a-0e1c-4c16-b842-7ee341347c9e","Type":"ContainerDied","Data":"842bb467acae2a51ee1f5bf8eff18b4f2c8396c491f1f057a9b20da78e222f48"} Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.407497 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="842bb467acae2a51ee1f5bf8eff18b4f2c8396c491f1f057a9b20da78e222f48" Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.407447 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6dhf2" Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.409883 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-tdc4c" Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.409882 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-tdc4c" event={"ID":"be2b75bc-27b6-4e33-8f17-0c30b4512014","Type":"ContainerDied","Data":"86eb851f446e9bec48e7f91959302fbaf011d45f4be1473034a4e4e2533fe284"} Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.410025 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86eb851f446e9bec48e7f91959302fbaf011d45f4be1473034a4e4e2533fe284" Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.413876 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ac9b-account-create-update-khj8h" event={"ID":"1c1e0d17-7093-464d-ae8f-6b483a28558b","Type":"ContainerDied","Data":"22c9b6550e22a998c4c5df5b8b74d5c0c028c1a7798d789dcd69cef6b31b25fb"} Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.413929 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22c9b6550e22a998c4c5df5b8b74d5c0c028c1a7798d789dcd69cef6b31b25fb" Dec 01 07:10:35 crc kubenswrapper[4822]: I1201 07:10:35.413930 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-ac9b-account-create-update-khj8h" Dec 01 07:10:38 crc kubenswrapper[4822]: I1201 07:10:38.502235 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-40f1-account-create-update-frwxk" event={"ID":"3f69aa7d-6165-42fb-a44e-1c0a25207b7c","Type":"ContainerDied","Data":"b2b5f37112494841825417465a1c3c2e69aabff5c5f8a1a6017aca619ab1955a"} Dec 01 07:10:38 crc kubenswrapper[4822]: I1201 07:10:38.503159 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2b5f37112494841825417465a1c3c2e69aabff5c5f8a1a6017aca619ab1955a" Dec 01 07:10:38 crc kubenswrapper[4822]: I1201 07:10:38.557611 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-40f1-account-create-update-frwxk" Dec 01 07:10:38 crc kubenswrapper[4822]: I1201 07:10:38.571150 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f69aa7d-6165-42fb-a44e-1c0a25207b7c-operator-scripts\") pod \"3f69aa7d-6165-42fb-a44e-1c0a25207b7c\" (UID: \"3f69aa7d-6165-42fb-a44e-1c0a25207b7c\") " Dec 01 07:10:38 crc kubenswrapper[4822]: I1201 07:10:38.571326 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lsp7\" (UniqueName: \"kubernetes.io/projected/3f69aa7d-6165-42fb-a44e-1c0a25207b7c-kube-api-access-2lsp7\") pod \"3f69aa7d-6165-42fb-a44e-1c0a25207b7c\" (UID: \"3f69aa7d-6165-42fb-a44e-1c0a25207b7c\") " Dec 01 07:10:38 crc kubenswrapper[4822]: I1201 07:10:38.573189 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f69aa7d-6165-42fb-a44e-1c0a25207b7c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3f69aa7d-6165-42fb-a44e-1c0a25207b7c" (UID: "3f69aa7d-6165-42fb-a44e-1c0a25207b7c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:38 crc kubenswrapper[4822]: I1201 07:10:38.577580 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f69aa7d-6165-42fb-a44e-1c0a25207b7c-kube-api-access-2lsp7" (OuterVolumeSpecName: "kube-api-access-2lsp7") pod "3f69aa7d-6165-42fb-a44e-1c0a25207b7c" (UID: "3f69aa7d-6165-42fb-a44e-1c0a25207b7c"). InnerVolumeSpecName "kube-api-access-2lsp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:38 crc kubenswrapper[4822]: I1201 07:10:38.673232 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lsp7\" (UniqueName: \"kubernetes.io/projected/3f69aa7d-6165-42fb-a44e-1c0a25207b7c-kube-api-access-2lsp7\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:38 crc kubenswrapper[4822]: I1201 07:10:38.673267 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f69aa7d-6165-42fb-a44e-1c0a25207b7c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:39 crc kubenswrapper[4822]: I1201 07:10:39.539213 4822 generic.go:334] "Generic (PLEG): container finished" podID="3f992a05-1279-4c84-b09b-977a6b1e4ea8" containerID="75e7995266bdbf6492daa619b0f5223c1c2481c8944e162cc407d65a37b501f6" exitCode=0 Dec 01 07:10:39 crc kubenswrapper[4822]: I1201 07:10:39.539395 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-h9ttd" event={"ID":"3f992a05-1279-4c84-b09b-977a6b1e4ea8","Type":"ContainerDied","Data":"75e7995266bdbf6492daa619b0f5223c1c2481c8944e162cc407d65a37b501f6"} Dec 01 07:10:39 crc kubenswrapper[4822]: I1201 07:10:39.545315 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-n4pm7" event={"ID":"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76","Type":"ContainerStarted","Data":"b8a08041b8cc69c12a5f58bc931a20fd82d2fe38b1578f6ab130694d4022707d"} Dec 01 07:10:39 crc kubenswrapper[4822]: I1201 07:10:39.556252 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-40f1-account-create-update-frwxk" Dec 01 07:10:39 crc kubenswrapper[4822]: I1201 07:10:39.557083 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347"} Dec 01 07:10:39 crc kubenswrapper[4822]: I1201 07:10:39.557124 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147"} Dec 01 07:10:39 crc kubenswrapper[4822]: I1201 07:10:39.557139 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905"} Dec 01 07:10:39 crc kubenswrapper[4822]: I1201 07:10:39.557150 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151"} Dec 01 07:10:39 crc kubenswrapper[4822]: I1201 07:10:39.587298 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-n4pm7" podStartSLOduration=3.801671908 podStartE2EDuration="9.587279615s" podCreationTimestamp="2025-12-01 07:10:30 +0000 UTC" firstStartedPulling="2025-12-01 07:10:32.559143534 +0000 UTC m=+1187.879951220" lastFinishedPulling="2025-12-01 07:10:38.344751221 +0000 UTC m=+1193.665558927" observedRunningTime="2025-12-01 07:10:39.578787417 +0000 UTC m=+1194.899595103" watchObservedRunningTime="2025-12-01 07:10:39.587279615 +0000 UTC m=+1194.908087291" Dec 01 07:10:40 crc kubenswrapper[4822]: I1201 07:10:40.588992 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7"} Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.071483 4822 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podd6636d12-3187-445d-afaf-2218dd71d932"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podd6636d12-3187-445d-afaf-2218dd71d932] : Timed out while waiting for systemd to remove kubepods-besteffort-podd6636d12_3187_445d_afaf_2218dd71d932.slice" Dec 01 07:10:41 crc kubenswrapper[4822]: E1201 07:10:41.071871 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort podd6636d12-3187-445d-afaf-2218dd71d932] : unable to destroy cgroup paths for cgroup [kubepods besteffort podd6636d12-3187-445d-afaf-2218dd71d932] : Timed out while waiting for systemd to remove kubepods-besteffort-podd6636d12_3187_445d_afaf_2218dd71d932.slice" pod="openstack/glance-8845-account-create-update-rjq78" podUID="d6636d12-3187-445d-afaf-2218dd71d932" Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.159097 4822 util.go:48] "No ready sandbox for pod can be found. 
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.222876 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-combined-ca-bundle\") pod \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") "
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.223053 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-547xt\" (UniqueName: \"kubernetes.io/projected/3f992a05-1279-4c84-b09b-977a6b1e4ea8-kube-api-access-547xt\") pod \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") "
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.223182 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-db-sync-config-data\") pod \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") "
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.223210 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-config-data\") pod \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\" (UID: \"3f992a05-1279-4c84-b09b-977a6b1e4ea8\") "
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.230713 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f992a05-1279-4c84-b09b-977a6b1e4ea8-kube-api-access-547xt" (OuterVolumeSpecName: "kube-api-access-547xt") pod "3f992a05-1279-4c84-b09b-977a6b1e4ea8" (UID: "3f992a05-1279-4c84-b09b-977a6b1e4ea8"). InnerVolumeSpecName "kube-api-access-547xt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.230949 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "3f992a05-1279-4c84-b09b-977a6b1e4ea8" (UID: "3f992a05-1279-4c84-b09b-977a6b1e4ea8"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.255642 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f992a05-1279-4c84-b09b-977a6b1e4ea8" (UID: "3f992a05-1279-4c84-b09b-977a6b1e4ea8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.296725 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-config-data" (OuterVolumeSpecName: "config-data") pod "3f992a05-1279-4c84-b09b-977a6b1e4ea8" (UID: "3f992a05-1279-4c84-b09b-977a6b1e4ea8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.326343 4822 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.326373 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.326382 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f992a05-1279-4c84-b09b-977a6b1e4ea8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.326392 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-547xt\" (UniqueName: \"kubernetes.io/projected/3f992a05-1279-4c84-b09b-977a6b1e4ea8-kube-api-access-547xt\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.620034 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974"}
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.620099 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec"}
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.620114 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864"}
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.620125 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1"}
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.620138 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad"}
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.623231 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8845-account-create-update-rjq78"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.623266 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-h9ttd"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.623221 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-h9ttd" event={"ID":"3f992a05-1279-4c84-b09b-977a6b1e4ea8","Type":"ContainerDied","Data":"583076057f35817395ebb630bdebd796135f65cba6e82ca4b88083d401417932"}
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.623399 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="583076057f35817395ebb630bdebd796135f65cba6e82ca4b88083d401417932"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.941838 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6df78bdcfc-tjwwv"]
Dec 01 07:10:41 crc kubenswrapper[4822]: E1201 07:10:41.942785 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f69aa7d-6165-42fb-a44e-1c0a25207b7c" containerName="mariadb-account-create-update"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.942806 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f69aa7d-6165-42fb-a44e-1c0a25207b7c" containerName="mariadb-account-create-update"
Dec 01 07:10:41 crc kubenswrapper[4822]: E1201 07:10:41.942820 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61ff107a-0e1c-4c16-b842-7ee341347c9e" containerName="mariadb-database-create"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.942826 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="61ff107a-0e1c-4c16-b842-7ee341347c9e" containerName="mariadb-database-create"
Dec 01 07:10:41 crc kubenswrapper[4822]: E1201 07:10:41.942847 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="246a1687-b876-4135-9a78-5aea28bd9663" containerName="mariadb-database-create"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.942853 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="246a1687-b876-4135-9a78-5aea28bd9663" containerName="mariadb-database-create"
Dec 01 07:10:41 crc kubenswrapper[4822]: E1201 07:10:41.942868 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a38c501-78bf-41cc-8caa-cf049fe4821d" containerName="mariadb-account-create-update"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.942875 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a38c501-78bf-41cc-8caa-cf049fe4821d" containerName="mariadb-account-create-update"
Dec 01 07:10:41 crc kubenswrapper[4822]: E1201 07:10:41.942889 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f992a05-1279-4c84-b09b-977a6b1e4ea8" containerName="glance-db-sync"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.942898 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f992a05-1279-4c84-b09b-977a6b1e4ea8" containerName="glance-db-sync"
Dec 01 07:10:41 crc kubenswrapper[4822]: E1201 07:10:41.942916 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be2b75bc-27b6-4e33-8f17-0c30b4512014" containerName="mariadb-database-create"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.942922 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="be2b75bc-27b6-4e33-8f17-0c30b4512014" containerName="mariadb-database-create"
Dec 01 07:10:41 crc kubenswrapper[4822]: E1201 07:10:41.942932 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c1e0d17-7093-464d-ae8f-6b483a28558b" containerName="mariadb-account-create-update"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.942938 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c1e0d17-7093-464d-ae8f-6b483a28558b" containerName="mariadb-account-create-update"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.943141 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c1e0d17-7093-464d-ae8f-6b483a28558b" containerName="mariadb-account-create-update"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.943162 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f69aa7d-6165-42fb-a44e-1c0a25207b7c" containerName="mariadb-account-create-update"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.943177 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="61ff107a-0e1c-4c16-b842-7ee341347c9e" containerName="mariadb-database-create"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.943191 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a38c501-78bf-41cc-8caa-cf049fe4821d" containerName="mariadb-account-create-update"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.943201 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f992a05-1279-4c84-b09b-977a6b1e4ea8" containerName="glance-db-sync"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.943209 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="246a1687-b876-4135-9a78-5aea28bd9663" containerName="mariadb-database-create"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.943216 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="be2b75bc-27b6-4e33-8f17-0c30b4512014" containerName="mariadb-database-create"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.944315 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv"
Dec 01 07:10:41 crc kubenswrapper[4822]: I1201 07:10:41.976309 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6df78bdcfc-tjwwv"]
Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.039121 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-ovsdbserver-sb\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv"
Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.039246 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq6wk\" (UniqueName: \"kubernetes.io/projected/3e1df4f0-59b0-4469-94a3-fb8c07516564-kube-api-access-sq6wk\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv"
Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.039289 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-ovsdbserver-nb\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv"
Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.039315 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-dns-svc\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv"
pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.039494 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-config\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.141890 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-ovsdbserver-sb\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.141958 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq6wk\" (UniqueName: \"kubernetes.io/projected/3e1df4f0-59b0-4469-94a3-fb8c07516564-kube-api-access-sq6wk\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.141996 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-ovsdbserver-nb\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.142024 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-dns-svc\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.142065 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-config\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.143163 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-ovsdbserver-nb\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.143192 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-config\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.143233 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-ovsdbserver-sb\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 
07:10:42.143525 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-dns-svc\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.170762 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq6wk\" (UniqueName: \"kubernetes.io/projected/3e1df4f0-59b0-4469-94a3-fb8c07516564-kube-api-access-sq6wk\") pod \"dnsmasq-dns-6df78bdcfc-tjwwv\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.268717 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.640104 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerStarted","Data":"b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed"} Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.642490 4822 generic.go:334] "Generic (PLEG): container finished" podID="3aa3420d-c277-4e32-be0b-4e7c4b7a7f76" containerID="b8a08041b8cc69c12a5f58bc931a20fd82d2fe38b1578f6ab130694d4022707d" exitCode=0 Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.642524 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-n4pm7" event={"ID":"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76","Type":"ContainerDied","Data":"b8a08041b8cc69c12a5f58bc931a20fd82d2fe38b1578f6ab130694d4022707d"} Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.675047 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=39.240290489 podStartE2EDuration="49.675025064s" podCreationTimestamp="2025-12-01 07:09:53 +0000 UTC" firstStartedPulling="2025-12-01 07:10:29.729138987 +0000 UTC m=+1185.049946663" lastFinishedPulling="2025-12-01 07:10:40.163873542 +0000 UTC m=+1195.484681238" observedRunningTime="2025-12-01 07:10:42.668255374 +0000 UTC m=+1197.989063060" watchObservedRunningTime="2025-12-01 07:10:42.675025064 +0000 UTC m=+1197.995832750" Dec 01 07:10:42 crc kubenswrapper[4822]: I1201 07:10:42.751442 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6df78bdcfc-tjwwv"] Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.121851 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6df78bdcfc-tjwwv"] Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.160692 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bfc9d5487-8sljt"] Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.162149 4822 util.go:30] "No sandbox for pod can be found. 
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.164473 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.165695 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-dns-svc\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.165741 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-dns-swift-storage-0\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.165773 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.165800 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fws2l\" (UniqueName: \"kubernetes.io/projected/dcdede93-c2d5-4e02-b96a-9ef61503819e-kube-api-access-fws2l\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.165851 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-config\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.165886 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.183699 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bfc9d5487-8sljt"]
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.268184 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-dns-svc\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.268256 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-dns-swift-storage-0\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.268289 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.268317 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fws2l\" (UniqueName: \"kubernetes.io/projected/dcdede93-c2d5-4e02-b96a-9ef61503819e-kube-api-access-fws2l\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.268366 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-config\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.268407 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.269176 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.269333 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-config\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.269512 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.269609 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-dns-swift-storage-0\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.269788 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-dns-svc\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.286908 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fws2l\" (UniqueName: \"kubernetes.io/projected/dcdede93-c2d5-4e02-b96a-9ef61503819e-kube-api-access-fws2l\") pod \"dnsmasq-dns-5bfc9d5487-8sljt\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") " pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.492359 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.656259 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" event={"ID":"3e1df4f0-59b0-4469-94a3-fb8c07516564","Type":"ContainerDied","Data":"abb201964ab8679c92f6e8177670c5aa9ef273925df7913a6adabb9a46980739"}
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.656231 4822 generic.go:334] "Generic (PLEG): container finished" podID="3e1df4f0-59b0-4469-94a3-fb8c07516564" containerID="abb201964ab8679c92f6e8177670c5aa9ef273925df7913a6adabb9a46980739" exitCode=0
Dec 01 07:10:43 crc kubenswrapper[4822]: I1201 07:10:43.656692 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" event={"ID":"3e1df4f0-59b0-4469-94a3-fb8c07516564","Type":"ContainerStarted","Data":"e534261d44510494d6f23bb6f7d893b1c74c4a60d1facb5afd2da2470fd2c2b4"}
Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.007221 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bfc9d5487-8sljt"]
Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.209220 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-n4pm7"
Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.310443 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwrct\" (UniqueName: \"kubernetes.io/projected/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-kube-api-access-fwrct\") pod \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\" (UID: \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\") "
Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.310588 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-config-data\") pod \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\" (UID: \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\") "
Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.310781 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-combined-ca-bundle\") pod \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\" (UID: \"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76\") "
Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.314203 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-kube-api-access-fwrct" (OuterVolumeSpecName: "kube-api-access-fwrct") pod "3aa3420d-c277-4e32-be0b-4e7c4b7a7f76" (UID: "3aa3420d-c277-4e32-be0b-4e7c4b7a7f76"). InnerVolumeSpecName "kube-api-access-fwrct". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.358668 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3aa3420d-c277-4e32-be0b-4e7c4b7a7f76" (UID: "3aa3420d-c277-4e32-be0b-4e7c4b7a7f76"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.375890 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-config-data" (OuterVolumeSpecName: "config-data") pod "3aa3420d-c277-4e32-be0b-4e7c4b7a7f76" (UID: "3aa3420d-c277-4e32-be0b-4e7c4b7a7f76"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.412740 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.412776 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwrct\" (UniqueName: \"kubernetes.io/projected/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-kube-api-access-fwrct\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.412786 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.673444 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" event={"ID":"3e1df4f0-59b0-4469-94a3-fb8c07516564","Type":"ContainerStarted","Data":"bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430"} Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.673596 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.673607 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" podUID="3e1df4f0-59b0-4469-94a3-fb8c07516564" containerName="dnsmasq-dns" containerID="cri-o://bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430" gracePeriod=10 Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.678271 4822 generic.go:334] "Generic (PLEG): container finished" podID="dcdede93-c2d5-4e02-b96a-9ef61503819e" containerID="c4635fe26814449695e25b8c6b37bdbc6096ba64aeb264b27734b43533dfd422" exitCode=0 Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.678330 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt" event={"ID":"dcdede93-c2d5-4e02-b96a-9ef61503819e","Type":"ContainerDied","Data":"c4635fe26814449695e25b8c6b37bdbc6096ba64aeb264b27734b43533dfd422"} Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.678356 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt" event={"ID":"dcdede93-c2d5-4e02-b96a-9ef61503819e","Type":"ContainerStarted","Data":"fd4ba30c7c21d6f81ace08a3e4984bcf145fa9046d99f56044d2c1628d147b9a"} Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.684241 4822 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-n4pm7" event={"ID":"3aa3420d-c277-4e32-be0b-4e7c4b7a7f76","Type":"ContainerDied","Data":"1be2cd0ffb8ae0e83d373c1b1cf876f19effa9892bb0d70faee583bf40c8a24a"} Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.684290 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1be2cd0ffb8ae0e83d373c1b1cf876f19effa9892bb0d70faee583bf40c8a24a" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.684353 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-n4pm7" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.725987 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" podStartSLOduration=3.725971827 podStartE2EDuration="3.725971827s" podCreationTimestamp="2025-12-01 07:10:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:10:44.711324686 +0000 UTC m=+1200.032132392" watchObservedRunningTime="2025-12-01 07:10:44.725971827 +0000 UTC m=+1200.046779513" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.939139 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bfc9d5487-8sljt"] Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.989392 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-65c6dfc787-2z2x7"] Dec 01 07:10:44 crc kubenswrapper[4822]: E1201 07:10:44.989828 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3aa3420d-c277-4e32-be0b-4e7c4b7a7f76" containerName="keystone-db-sync" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.989841 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3aa3420d-c277-4e32-be0b-4e7c4b7a7f76" containerName="keystone-db-sync" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.990004 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="3aa3420d-c277-4e32-be0b-4e7c4b7a7f76" containerName="keystone-db-sync" Dec 01 07:10:44 crc kubenswrapper[4822]: I1201 07:10:44.990828 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.023193 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-cdbjg"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.038809 4822 util.go:30] "No sandbox for pod can be found. 
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.042773 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.043072 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.043227 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.043513 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-dcwzn"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.043666 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.142305 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwqfg\" (UniqueName: \"kubernetes.io/projected/7bfdfa19-8696-4747-9d07-004881884560-kube-api-access-pwqfg\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.142364 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-config\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.142406 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-config-data\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.142435 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-ovsdbserver-sb\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.142470 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-dns-swift-storage-0\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.142489 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-ovsdbserver-nb\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.142526 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-scripts\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.142561 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-combined-ca-bundle\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.142599 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-fernet-keys\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.142650 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-credential-keys\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.142667 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-dns-svc\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.142691 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6sl7\" (UniqueName: \"kubernetes.io/projected/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-kube-api-access-h6sl7\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.175094 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65c6dfc787-2z2x7"]
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.214669 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cdbjg"]
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.248288 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-dns-svc\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.248374 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6sl7\" (UniqueName: \"kubernetes.io/projected/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-kube-api-access-h6sl7\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.248403 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwqfg\" (UniqueName: \"kubernetes.io/projected/7bfdfa19-8696-4747-9d07-004881884560-kube-api-access-pwqfg\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.248421 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-config\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.248452 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-config-data\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.248476 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-ovsdbserver-sb\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.248500 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-dns-swift-storage-0\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.248521 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-ovsdbserver-nb\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.248574 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-scripts\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.248606 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-combined-ca-bundle\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.248626 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-fernet-keys\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg"
Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.248667 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-credential-keys\") pod \"keystone-bootstrap-cdbjg\" 
(UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.250652 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-ovsdbserver-sb\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.251287 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-dns-svc\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.259247 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-config\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.266677 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-dns-swift-storage-0\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.268452 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-credential-keys\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.270602 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-scripts\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.275461 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-ovsdbserver-nb\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.284817 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-config-data\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.285216 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-fernet-keys\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.285258 4822 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/neutron-db-sync-mhhlz"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.286238 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-combined-ca-bundle\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.286373 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-mhhlz" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.290847 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.291114 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-8bxmm" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.291266 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.303773 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwqfg\" (UniqueName: \"kubernetes.io/projected/7bfdfa19-8696-4747-9d07-004881884560-kube-api-access-pwqfg\") pod \"dnsmasq-dns-65c6dfc787-2z2x7\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.309356 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6sl7\" (UniqueName: \"kubernetes.io/projected/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-kube-api-access-h6sl7\") pod \"keystone-bootstrap-cdbjg\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " pod="openstack/keystone-bootstrap-cdbjg" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.310474 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-mhhlz"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.326628 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-7wprf"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.327833 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.352887 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.353152 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.353160 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65c6dfc787-2z2x7"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.353290 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-lhmwk" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.354222 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.412330 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-cdbjg" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.416673 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-xppc9"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.429204 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.436658 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-zndwr"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.438060 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-zndwr" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.440516 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.444032 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.444217 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-qzrkz" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.444349 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-vqghq" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.448256 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.457302 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-combined-ca-bundle\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.457358 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kz8lf\" (UniqueName: \"kubernetes.io/projected/eb723787-e103-4feb-9aba-df1a78059e4c-kube-api-access-kz8lf\") pod \"neutron-db-sync-mhhlz\" (UID: \"eb723787-e103-4feb-9aba-df1a78059e4c\") " pod="openstack/neutron-db-sync-mhhlz" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.457378 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-config-data\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.457465 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-db-sync-config-data\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.457502 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/eb723787-e103-4feb-9aba-df1a78059e4c-config\") pod \"neutron-db-sync-mhhlz\" (UID: \"eb723787-e103-4feb-9aba-df1a78059e4c\") " pod="openstack/neutron-db-sync-mhhlz" Dec 01 
07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.457564 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-etc-machine-id\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.457596 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb723787-e103-4feb-9aba-df1a78059e4c-combined-ca-bundle\") pod \"neutron-db-sync-mhhlz\" (UID: \"eb723787-e103-4feb-9aba-df1a78059e4c\") " pod="openstack/neutron-db-sync-mhhlz" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.457676 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78cv6\" (UniqueName: \"kubernetes.io/projected/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-kube-api-access-78cv6\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.457701 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-scripts\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.466262 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-xppc9"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.467295 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.515828 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-7wprf"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.530498 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f645789c-s26qn"] Dec 01 07:10:45 crc kubenswrapper[4822]: E1201 07:10:45.530893 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e1df4f0-59b0-4469-94a3-fb8c07516564" containerName="init" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.530910 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e1df4f0-59b0-4469-94a3-fb8c07516564" containerName="init" Dec 01 07:10:45 crc kubenswrapper[4822]: E1201 07:10:45.530939 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e1df4f0-59b0-4469-94a3-fb8c07516564" containerName="dnsmasq-dns" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.530945 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e1df4f0-59b0-4469-94a3-fb8c07516564" containerName="dnsmasq-dns" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.531107 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e1df4f0-59b0-4469-94a3-fb8c07516564" containerName="dnsmasq-dns" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.531965 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563205 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sq6wk\" (UniqueName: \"kubernetes.io/projected/3e1df4f0-59b0-4469-94a3-fb8c07516564-kube-api-access-sq6wk\") pod \"3e1df4f0-59b0-4469-94a3-fb8c07516564\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563286 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-config\") pod \"3e1df4f0-59b0-4469-94a3-fb8c07516564\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563324 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-ovsdbserver-nb\") pod \"3e1df4f0-59b0-4469-94a3-fb8c07516564\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563470 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-ovsdbserver-sb\") pod \"3e1df4f0-59b0-4469-94a3-fb8c07516564\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563513 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-dns-svc\") pod \"3e1df4f0-59b0-4469-94a3-fb8c07516564\" (UID: \"3e1df4f0-59b0-4469-94a3-fb8c07516564\") " Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563722 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78cv6\" (UniqueName: \"kubernetes.io/projected/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-kube-api-access-78cv6\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563756 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-scripts\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563811 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-combined-ca-bundle\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563857 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-scripts\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563888 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-combined-ca-bundle\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563921 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75dvt\" (UniqueName: \"kubernetes.io/projected/fb9401cf-62a5-407e-8ac0-88d0eecc830d-kube-api-access-75dvt\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563945 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-db-sync-config-data\") pod \"barbican-db-sync-zndwr\" (UID: \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\") " pod="openstack/barbican-db-sync-zndwr" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563967 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kz8lf\" (UniqueName: \"kubernetes.io/projected/eb723787-e103-4feb-9aba-df1a78059e4c-kube-api-access-kz8lf\") pod \"neutron-db-sync-mhhlz\" (UID: \"eb723787-e103-4feb-9aba-df1a78059e4c\") " pod="openstack/neutron-db-sync-mhhlz" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.563984 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-config-data\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.564000 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-config-data\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.564013 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb9401cf-62a5-407e-8ac0-88d0eecc830d-logs\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.564035 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrrkq\" (UniqueName: \"kubernetes.io/projected/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-kube-api-access-mrrkq\") pod \"barbican-db-sync-zndwr\" (UID: \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\") " pod="openstack/barbican-db-sync-zndwr" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.564077 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-db-sync-config-data\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.564101 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/eb723787-e103-4feb-9aba-df1a78059e4c-config\") pod \"neutron-db-sync-mhhlz\" (UID: \"eb723787-e103-4feb-9aba-df1a78059e4c\") " pod="openstack/neutron-db-sync-mhhlz" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.564132 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-etc-machine-id\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.564154 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb723787-e103-4feb-9aba-df1a78059e4c-combined-ca-bundle\") pod \"neutron-db-sync-mhhlz\" (UID: \"eb723787-e103-4feb-9aba-df1a78059e4c\") " pod="openstack/neutron-db-sync-mhhlz" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.564170 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-combined-ca-bundle\") pod \"barbican-db-sync-zndwr\" (UID: \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\") " pod="openstack/barbican-db-sync-zndwr" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.599953 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-etc-machine-id\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.615456 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-zndwr"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.631351 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kz8lf\" (UniqueName: \"kubernetes.io/projected/eb723787-e103-4feb-9aba-df1a78059e4c-kube-api-access-kz8lf\") pod \"neutron-db-sync-mhhlz\" (UID: \"eb723787-e103-4feb-9aba-df1a78059e4c\") " pod="openstack/neutron-db-sync-mhhlz" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.643643 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78cv6\" (UniqueName: \"kubernetes.io/projected/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-kube-api-access-78cv6\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.651266 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-scripts\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.659670 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-config-data\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.660170 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-combined-ca-bundle\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.660497 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb723787-e103-4feb-9aba-df1a78059e4c-combined-ca-bundle\") pod \"neutron-db-sync-mhhlz\" (UID: \"eb723787-e103-4feb-9aba-df1a78059e4c\") " pod="openstack/neutron-db-sync-mhhlz" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666631 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-config\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666674 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-combined-ca-bundle\") pod \"barbican-db-sync-zndwr\" (UID: \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\") " pod="openstack/barbican-db-sync-zndwr" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666711 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-dns-swift-storage-0\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666737 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-sb\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666764 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-combined-ca-bundle\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666813 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-scripts\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666831 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-nb\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666855 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75dvt\" (UniqueName: 
\"kubernetes.io/projected/fb9401cf-62a5-407e-8ac0-88d0eecc830d-kube-api-access-75dvt\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666873 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-db-sync-config-data\") pod \"barbican-db-sync-zndwr\" (UID: \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\") " pod="openstack/barbican-db-sync-zndwr" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666894 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-config-data\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666907 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb9401cf-62a5-407e-8ac0-88d0eecc830d-logs\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666923 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrrkq\" (UniqueName: \"kubernetes.io/projected/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-kube-api-access-mrrkq\") pod \"barbican-db-sync-zndwr\" (UID: \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\") " pod="openstack/barbican-db-sync-zndwr" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.666982 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-dns-svc\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.667002 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lq87n\" (UniqueName: \"kubernetes.io/projected/d28f5670-3f16-470f-8a83-a1dcaca7bce4-kube-api-access-lq87n\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.671055 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-db-sync-config-data\") pod \"cinder-db-sync-7wprf\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.672239 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e1df4f0-59b0-4469-94a3-fb8c07516564-kube-api-access-sq6wk" (OuterVolumeSpecName: "kube-api-access-sq6wk") pod "3e1df4f0-59b0-4469-94a3-fb8c07516564" (UID: "3e1df4f0-59b0-4469-94a3-fb8c07516564"). InnerVolumeSpecName "kube-api-access-sq6wk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.672901 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/eb723787-e103-4feb-9aba-df1a78059e4c-config\") pod \"neutron-db-sync-mhhlz\" (UID: \"eb723787-e103-4feb-9aba-df1a78059e4c\") " pod="openstack/neutron-db-sync-mhhlz" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.674366 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb9401cf-62a5-407e-8ac0-88d0eecc830d-logs\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.691449 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f645789c-s26qn"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.704094 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-config" (OuterVolumeSpecName: "config") pod "3e1df4f0-59b0-4469-94a3-fb8c07516564" (UID: "3e1df4f0-59b0-4469-94a3-fb8c07516564"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.709180 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrrkq\" (UniqueName: \"kubernetes.io/projected/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-kube-api-access-mrrkq\") pod \"barbican-db-sync-zndwr\" (UID: \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\") " pod="openstack/barbican-db-sync-zndwr" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.710057 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-db-sync-config-data\") pod \"barbican-db-sync-zndwr\" (UID: \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\") " pod="openstack/barbican-db-sync-zndwr" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.711880 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-config-data\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.715138 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75dvt\" (UniqueName: \"kubernetes.io/projected/fb9401cf-62a5-407e-8ac0-88d0eecc830d-kube-api-access-75dvt\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.717770 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-combined-ca-bundle\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.730056 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-combined-ca-bundle\") pod \"barbican-db-sync-zndwr\" (UID: \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\") " 
pod="openstack/barbican-db-sync-zndwr" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.736287 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-scripts\") pod \"placement-db-sync-xppc9\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.779709 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-nb\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.779800 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-dns-svc\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.779818 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lq87n\" (UniqueName: \"kubernetes.io/projected/d28f5670-3f16-470f-8a83-a1dcaca7bce4-kube-api-access-lq87n\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.779846 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-config\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.779878 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-dns-swift-storage-0\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.779902 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-sb\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.779949 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sq6wk\" (UniqueName: \"kubernetes.io/projected/3e1df4f0-59b0-4469-94a3-fb8c07516564-kube-api-access-sq6wk\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.779960 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.780596 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-nb\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: 
\"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.782661 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-sb\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.783272 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-config\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.785230 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-mhhlz" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.793171 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-dns-svc\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.797721 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-dns-swift-storage-0\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.804472 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.808179 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.809395 4822 generic.go:334] "Generic (PLEG): container finished" podID="3e1df4f0-59b0-4469-94a3-fb8c07516564" containerID="bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430" exitCode=0 Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.809495 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" event={"ID":"3e1df4f0-59b0-4469-94a3-fb8c07516564","Type":"ContainerDied","Data":"bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430"} Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.809520 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" event={"ID":"3e1df4f0-59b0-4469-94a3-fb8c07516564","Type":"ContainerDied","Data":"e534261d44510494d6f23bb6f7d893b1c74c4a60d1facb5afd2da2470fd2c2b4"} Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.809540 4822 scope.go:117] "RemoveContainer" containerID="bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.810105 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6df78bdcfc-tjwwv" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.813276 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.813597 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.848895 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lq87n\" (UniqueName: \"kubernetes.io/projected/d28f5670-3f16-470f-8a83-a1dcaca7bce4-kube-api-access-lq87n\") pod \"dnsmasq-dns-55f645789c-s26qn\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.857280 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7wprf" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.874187 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.883039 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.883126 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.883174 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-run-httpd\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.883209 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-scripts\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.883241 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldk4q\" (UniqueName: \"kubernetes.io/projected/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-kube-api-access-ldk4q\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.883297 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-log-httpd\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.883338 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-config-data\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.893695 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3e1df4f0-59b0-4469-94a3-fb8c07516564" (UID: "3e1df4f0-59b0-4469-94a3-fb8c07516564"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.905235 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt" event={"ID":"dcdede93-c2d5-4e02-b96a-9ef61503819e","Type":"ContainerStarted","Data":"c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02"} Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.905362 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt" podUID="dcdede93-c2d5-4e02-b96a-9ef61503819e" containerName="dnsmasq-dns" containerID="cri-o://c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02" gracePeriod=10 Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.905485 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.915628 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3e1df4f0-59b0-4469-94a3-fb8c07516564" (UID: "3e1df4f0-59b0-4469-94a3-fb8c07516564"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.926197 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3e1df4f0-59b0-4469-94a3-fb8c07516564" (UID: "3e1df4f0-59b0-4469-94a3-fb8c07516564"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.940235 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xppc9" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.980364 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-zndwr" Dec 01 07:10:45 crc kubenswrapper[4822]: I1201 07:10:45.988585 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldk4q\" (UniqueName: \"kubernetes.io/projected/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-kube-api-access-ldk4q\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.000778 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-log-httpd\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.002269 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-config-data\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.002668 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.002897 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.003092 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-run-httpd\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.003269 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-scripts\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.003576 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.002910 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-log-httpd\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.004140 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-run-httpd\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.005362 4822 
reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.006708 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e1df4f0-59b0-4469-94a3-fb8c07516564-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.007520 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.026934 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldk4q\" (UniqueName: \"kubernetes.io/projected/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-kube-api-access-ldk4q\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.028068 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.036522 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.038944 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-config-data\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.047787 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-scripts\") pod \"ceilometer-0\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.058876 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt" podStartSLOduration=3.05884967 podStartE2EDuration="3.05884967s" podCreationTimestamp="2025-12-01 07:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:10:45.951771252 +0000 UTC m=+1201.272578938" watchObservedRunningTime="2025-12-01 07:10:46.05884967 +0000 UTC m=+1201.379657366" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.071894 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.114669 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.120278 4822 util.go:30] "No sandbox for pod can be found. 
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.120278 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.124933 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-zzv5d"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.125143 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.125249 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.139965 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.212987 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chsdl\" (UniqueName: \"kubernetes.io/projected/4df73001-6ac3-4080-ba58-a7779e8c03d1-kube-api-access-chsdl\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.213070 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4df73001-6ac3-4080-ba58-a7779e8c03d1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.213272 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.213430 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-config-data\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.213492 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-scripts\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.213561 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4df73001-6ac3-4080-ba58-a7779e8c03d1-logs\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.213603 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.218627 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.220023 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.222535 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.236633 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6df78bdcfc-tjwwv"]
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.256226 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6df78bdcfc-tjwwv"]
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.269295 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.273919 4822 scope.go:117] "RemoveContainer" containerID="abb201964ab8679c92f6e8177670c5aa9ef273925df7913a6adabb9a46980739"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.281467 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65c6dfc787-2z2x7"]
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331322 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331384 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f9fc2d4-8719-4887-817a-5a75520483b6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331422 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331456 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331487 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331569 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-config-data\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331591 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331627 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-scripts\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331660 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4df73001-6ac3-4080-ba58-a7779e8c03d1-logs\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331686 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331713 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9fc2d4-8719-4887-817a-5a75520483b6-logs\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331741 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg2hh\" (UniqueName: \"kubernetes.io/projected/1f9fc2d4-8719-4887-817a-5a75520483b6-kube-api-access-qg2hh\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331778 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chsdl\" (UniqueName: \"kubernetes.io/projected/4df73001-6ac3-4080-ba58-a7779e8c03d1-kube-api-access-chsdl\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331808 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4df73001-6ac3-4080-ba58-a7779e8c03d1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: W1201 07:10:46.331506 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bfdfa19_8696_4747_9d07_004881884560.slice/crio-51f22b3121973de344c61a9abe6a4c44f4a9cd9ed6023ffcd974214cd9e8e8a1 WatchSource:0}: Error finding container 51f22b3121973de344c61a9abe6a4c44f4a9cd9ed6023ffcd974214cd9e8e8a1: Status 404 returned error can't find the container with id 51f22b3121973de344c61a9abe6a4c44f4a9cd9ed6023ffcd974214cd9e8e8a1
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.332331 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4df73001-6ac3-4080-ba58-a7779e8c03d1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.333683 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4df73001-6ac3-4080-ba58-a7779e8c03d1-logs\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.331778 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.340141 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-config-data\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.343380 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-scripts\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.344009 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.348207 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chsdl\" (UniqueName: \"kubernetes.io/projected/4df73001-6ac3-4080-ba58-a7779e8c03d1-kube-api-access-chsdl\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.369587 4822 scope.go:117] "RemoveContainer" containerID="bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430"
Dec 01 07:10:46 crc kubenswrapper[4822]: E1201 07:10:46.371935 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430\": container with ID starting with bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430 not found: ID does not exist" containerID="bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.371973 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430"} err="failed to get container status \"bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430\": rpc error: code = NotFound desc = could not find container \"bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430\": container with ID starting with bc898506fe2140dde23727ea5c63ed42da96431c7fbd321c62be5c5311c15430 not found: ID does not exist"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.371997 4822 scope.go:117] "RemoveContainer" containerID="abb201964ab8679c92f6e8177670c5aa9ef273925df7913a6adabb9a46980739"
Dec 01 07:10:46 crc kubenswrapper[4822]: E1201 07:10:46.374829 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abb201964ab8679c92f6e8177670c5aa9ef273925df7913a6adabb9a46980739\": container with ID starting with abb201964ab8679c92f6e8177670c5aa9ef273925df7913a6adabb9a46980739 not found: ID does not exist" containerID="abb201964ab8679c92f6e8177670c5aa9ef273925df7913a6adabb9a46980739"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.374856 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abb201964ab8679c92f6e8177670c5aa9ef273925df7913a6adabb9a46980739"} err="failed to get container status \"abb201964ab8679c92f6e8177670c5aa9ef273925df7913a6adabb9a46980739\": rpc error: code = NotFound desc = could not find container \"abb201964ab8679c92f6e8177670c5aa9ef273925df7913a6adabb9a46980739\": container with ID starting with abb201964ab8679c92f6e8177670c5aa9ef273925df7913a6adabb9a46980739 not found: ID does not exist"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.388404 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.432929 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.432994 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9fc2d4-8719-4887-817a-5a75520483b6-logs\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.433019 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg2hh\" (UniqueName: \"kubernetes.io/projected/1f9fc2d4-8719-4887-817a-5a75520483b6-kube-api-access-qg2hh\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.433066 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.433092 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f9fc2d4-8719-4887-817a-5a75520483b6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.433115 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.433140 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.433283 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.433962 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f9fc2d4-8719-4887-817a-5a75520483b6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.436255 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9fc2d4-8719-4887-817a-5a75520483b6-logs\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.450458 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cdbjg"]
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.456387 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.469578 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.470771 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.471910 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: W1201 07:10:46.473430 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod112e181f_4b31_4ac6_b92c_0501e0fdfe1d.slice/crio-ff85fa0baf44177d7eb52585e3255a259108844c6679a54aa32f0031d2c9392d WatchSource:0}: Error finding container ff85fa0baf44177d7eb52585e3255a259108844c6679a54aa32f0031d2c9392d: Status 404 returned error can't find the container with id ff85fa0baf44177d7eb52585e3255a259108844c6679a54aa32f0031d2c9392d
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.475146 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg2hh\" (UniqueName: \"kubernetes.io/projected/1f9fc2d4-8719-4887-817a-5a75520483b6-kube-api-access-qg2hh\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.511671 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.568723 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.720859 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-xppc9"]
Dec 01 07:10:46 crc kubenswrapper[4822]: W1201 07:10:46.727681 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb9401cf_62a5_407e_8ac0_88d0eecc830d.slice/crio-c3ef05bdb917bb04c191537d0836844b2c9dedacb5a630ea15dda06d5e73bb6b WatchSource:0}: Error finding container c3ef05bdb917bb04c191537d0836844b2c9dedacb5a630ea15dda06d5e73bb6b: Status 404 returned error can't find the container with id c3ef05bdb917bb04c191537d0836844b2c9dedacb5a630ea15dda06d5e73bb6b
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.774671 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.843370 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-ovsdbserver-sb\") pod \"dcdede93-c2d5-4e02-b96a-9ef61503819e\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") "
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.843476 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-ovsdbserver-nb\") pod \"dcdede93-c2d5-4e02-b96a-9ef61503819e\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") "
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.843504 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fws2l\" (UniqueName: \"kubernetes.io/projected/dcdede93-c2d5-4e02-b96a-9ef61503819e-kube-api-access-fws2l\") pod \"dcdede93-c2d5-4e02-b96a-9ef61503819e\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") "
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.843527 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-dns-svc\") pod \"dcdede93-c2d5-4e02-b96a-9ef61503819e\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") "
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.843668 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-config\") pod \"dcdede93-c2d5-4e02-b96a-9ef61503819e\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") "
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.843714 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-dns-swift-storage-0\") pod \"dcdede93-c2d5-4e02-b96a-9ef61503819e\" (UID: \"dcdede93-c2d5-4e02-b96a-9ef61503819e\") "
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.854400 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcdede93-c2d5-4e02-b96a-9ef61503819e-kube-api-access-fws2l" (OuterVolumeSpecName: "kube-api-access-fws2l") pod "dcdede93-c2d5-4e02-b96a-9ef61503819e" (UID: "dcdede93-c2d5-4e02-b96a-9ef61503819e"). InnerVolumeSpecName "kube-api-access-fws2l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.931291 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dcdede93-c2d5-4e02-b96a-9ef61503819e" (UID: "dcdede93-c2d5-4e02-b96a-9ef61503819e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.932247 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "dcdede93-c2d5-4e02-b96a-9ef61503819e" (UID: "dcdede93-c2d5-4e02-b96a-9ef61503819e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.940458 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-config" (OuterVolumeSpecName: "config") pod "dcdede93-c2d5-4e02-b96a-9ef61503819e" (UID: "dcdede93-c2d5-4e02-b96a-9ef61503819e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.946929 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fws2l\" (UniqueName: \"kubernetes.io/projected/dcdede93-c2d5-4e02-b96a-9ef61503819e-kube-api-access-fws2l\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.947070 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-config\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.947149 4822 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.947224 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.953265 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dcdede93-c2d5-4e02-b96a-9ef61503819e" (UID: "dcdede93-c2d5-4e02-b96a-9ef61503819e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.959523 4822 generic.go:334] "Generic (PLEG): container finished" podID="dcdede93-c2d5-4e02-b96a-9ef61503819e" containerID="c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02" exitCode=0
Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.959675 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt"
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:46 crc kubenswrapper[4822]: I1201 07:10:46.961917 4822 generic.go:334] "Generic (PLEG): container finished" podID="7bfdfa19-8696-4747-9d07-004881884560" containerID="baf2a99261478a2f3857de93be4d05907ef46c2c84bd3ec0daf62e5b1a623778" exitCode=0 Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.050436 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.050894 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dcdede93-c2d5-4e02-b96a-9ef61503819e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.053698 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-cdbjg" podStartSLOduration=3.053681406 podStartE2EDuration="3.053681406s" podCreationTimestamp="2025-12-01 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:10:47.029846446 +0000 UTC m=+1202.350654132" watchObservedRunningTime="2025-12-01 07:10:47.053681406 +0000 UTC m=+1202.374489092" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.070982 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e1df4f0-59b0-4469-94a3-fb8c07516564" path="/var/lib/kubelet/pods/3e1df4f0-59b0-4469-94a3-fb8c07516564/volumes" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.074096 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xppc9" event={"ID":"fb9401cf-62a5-407e-8ac0-88d0eecc830d","Type":"ContainerStarted","Data":"c3ef05bdb917bb04c191537d0836844b2c9dedacb5a630ea15dda06d5e73bb6b"} Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.074240 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-mhhlz"] Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.074260 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f645789c-s26qn"] Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.074281 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-7wprf"] Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.074302 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt" event={"ID":"dcdede93-c2d5-4e02-b96a-9ef61503819e","Type":"ContainerDied","Data":"c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02"} Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.074472 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfc9d5487-8sljt" event={"ID":"dcdede93-c2d5-4e02-b96a-9ef61503819e","Type":"ContainerDied","Data":"fd4ba30c7c21d6f81ace08a3e4984bcf145fa9046d99f56044d2c1628d147b9a"} Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.074485 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" event={"ID":"7bfdfa19-8696-4747-9d07-004881884560","Type":"ContainerDied","Data":"baf2a99261478a2f3857de93be4d05907ef46c2c84bd3ec0daf62e5b1a623778"} Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.074502 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" 
event={"ID":"7bfdfa19-8696-4747-9d07-004881884560","Type":"ContainerStarted","Data":"51f22b3121973de344c61a9abe6a4c44f4a9cd9ed6023ffcd974214cd9e8e8a1"} Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.074522 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cdbjg" event={"ID":"112e181f-4b31-4ac6-b92c-0501e0fdfe1d","Type":"ContainerStarted","Data":"52174cf0388de8aa36102ca3e1e1ec5cee4d830be4c181aed635624923e8d0e1"} Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.074533 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cdbjg" event={"ID":"112e181f-4b31-4ac6-b92c-0501e0fdfe1d","Type":"ContainerStarted","Data":"ff85fa0baf44177d7eb52585e3255a259108844c6679a54aa32f0031d2c9392d"} Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.075814 4822 scope.go:117] "RemoveContainer" containerID="c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.119764 4822 scope.go:117] "RemoveContainer" containerID="c4635fe26814449695e25b8c6b37bdbc6096ba64aeb264b27734b43533dfd422" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.166044 4822 scope.go:117] "RemoveContainer" containerID="c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02" Dec 01 07:10:47 crc kubenswrapper[4822]: E1201 07:10:47.181986 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02\": container with ID starting with c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02 not found: ID does not exist" containerID="c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.182036 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02"} err="failed to get container status \"c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02\": rpc error: code = NotFound desc = could not find container \"c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02\": container with ID starting with c5e0c3f7fac391b55d339dcbc5ee65601d1fe02bf5486cbb9bf4958d17803e02 not found: ID does not exist" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.182067 4822 scope.go:117] "RemoveContainer" containerID="c4635fe26814449695e25b8c6b37bdbc6096ba64aeb264b27734b43533dfd422" Dec 01 07:10:47 crc kubenswrapper[4822]: E1201 07:10:47.182542 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4635fe26814449695e25b8c6b37bdbc6096ba64aeb264b27734b43533dfd422\": container with ID starting with c4635fe26814449695e25b8c6b37bdbc6096ba64aeb264b27734b43533dfd422 not found: ID does not exist" containerID="c4635fe26814449695e25b8c6b37bdbc6096ba64aeb264b27734b43533dfd422" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.182587 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4635fe26814449695e25b8c6b37bdbc6096ba64aeb264b27734b43533dfd422"} err="failed to get container status \"c4635fe26814449695e25b8c6b37bdbc6096ba64aeb264b27734b43533dfd422\": rpc error: code = NotFound desc = could not find container \"c4635fe26814449695e25b8c6b37bdbc6096ba64aeb264b27734b43533dfd422\": container with ID starting with 
c4635fe26814449695e25b8c6b37bdbc6096ba64aeb264b27734b43533dfd422 not found: ID does not exist" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.184928 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-zndwr"] Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.202239 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.310110 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bfc9d5487-8sljt"] Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.330838 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bfc9d5487-8sljt"] Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.437789 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.514084 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.567986 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-dns-swift-storage-0\") pod \"7bfdfa19-8696-4747-9d07-004881884560\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.568087 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwqfg\" (UniqueName: \"kubernetes.io/projected/7bfdfa19-8696-4747-9d07-004881884560-kube-api-access-pwqfg\") pod \"7bfdfa19-8696-4747-9d07-004881884560\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.568109 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-ovsdbserver-nb\") pod \"7bfdfa19-8696-4747-9d07-004881884560\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.568139 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-config\") pod \"7bfdfa19-8696-4747-9d07-004881884560\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.568176 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-dns-svc\") pod \"7bfdfa19-8696-4747-9d07-004881884560\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.568264 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-ovsdbserver-sb\") pod \"7bfdfa19-8696-4747-9d07-004881884560\" (UID: \"7bfdfa19-8696-4747-9d07-004881884560\") " Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.580893 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bfdfa19-8696-4747-9d07-004881884560-kube-api-access-pwqfg" (OuterVolumeSpecName: "kube-api-access-pwqfg") pod "7bfdfa19-8696-4747-9d07-004881884560" (UID: 
"7bfdfa19-8696-4747-9d07-004881884560"). InnerVolumeSpecName "kube-api-access-pwqfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.617162 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-config" (OuterVolumeSpecName: "config") pod "7bfdfa19-8696-4747-9d07-004881884560" (UID: "7bfdfa19-8696-4747-9d07-004881884560"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.617374 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7bfdfa19-8696-4747-9d07-004881884560" (UID: "7bfdfa19-8696-4747-9d07-004881884560"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.620594 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7bfdfa19-8696-4747-9d07-004881884560" (UID: "7bfdfa19-8696-4747-9d07-004881884560"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.623521 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7bfdfa19-8696-4747-9d07-004881884560" (UID: "7bfdfa19-8696-4747-9d07-004881884560"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.660590 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7bfdfa19-8696-4747-9d07-004881884560" (UID: "7bfdfa19-8696-4747-9d07-004881884560"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.660760 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.671833 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.672134 4822 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.672225 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwqfg\" (UniqueName: \"kubernetes.io/projected/7bfdfa19-8696-4747-9d07-004881884560-kube-api-access-pwqfg\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.672282 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.672346 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.672420 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7bfdfa19-8696-4747-9d07-004881884560-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.990658 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1f9fc2d4-8719-4887-817a-5a75520483b6","Type":"ContainerStarted","Data":"aca8846ef82543612ac9fdb05c8e98af46a63ab89687eb7bc2a01e1974dcb331"} Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.996516 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" event={"ID":"7bfdfa19-8696-4747-9d07-004881884560","Type":"ContainerDied","Data":"51f22b3121973de344c61a9abe6a4c44f4a9cd9ed6023ffcd974214cd9e8e8a1"} Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.996575 4822 scope.go:117] "RemoveContainer" containerID="baf2a99261478a2f3857de93be4d05907ef46c2c84bd3ec0daf62e5b1a623778" Dec 01 07:10:47 crc kubenswrapper[4822]: I1201 07:10:47.996690 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65c6dfc787-2z2x7" Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.007780 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-zndwr" event={"ID":"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0","Type":"ContainerStarted","Data":"1903828d715011c1a3f18ccdfb1ae9022623c71aafe22414bb123350378e4342"} Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.020710 4822 generic.go:334] "Generic (PLEG): container finished" podID="d28f5670-3f16-470f-8a83-a1dcaca7bce4" containerID="4a4012d67e9e9ab05aedc0b9fd3ce776d571a637172f17702a968378c4b88abf" exitCode=0 Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.020809 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f645789c-s26qn" event={"ID":"d28f5670-3f16-470f-8a83-a1dcaca7bce4","Type":"ContainerDied","Data":"4a4012d67e9e9ab05aedc0b9fd3ce776d571a637172f17702a968378c4b88abf"} Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.020836 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f645789c-s26qn" event={"ID":"d28f5670-3f16-470f-8a83-a1dcaca7bce4","Type":"ContainerStarted","Data":"2b2c8781222661d9dc65cc344c748402ef0d9d48147b9d234e56ad3e822876bf"} Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.033927 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162","Type":"ContainerStarted","Data":"f5dfc0eafa62543ad16188b1fe4ccab079d6cbc6a2f0a270e0a8b607b89744e6"} Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.044151 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4df73001-6ac3-4080-ba58-a7779e8c03d1","Type":"ContainerStarted","Data":"6107dfc5d255278028ae4da177715f93ad20069a2d58ec9264a624af6549daa8"} Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.045731 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7wprf" event={"ID":"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693","Type":"ContainerStarted","Data":"2147726a83d08adca032521f99ccabbea481a1946e5f7244c2c73261909e548b"} Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.069310 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-mhhlz" event={"ID":"eb723787-e103-4feb-9aba-df1a78059e4c","Type":"ContainerStarted","Data":"977351d0eec5eede4e16b4c71c3f7da7b277de2618a05bde7b6e5dd4825cbde2"} Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.069457 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-mhhlz" event={"ID":"eb723787-e103-4feb-9aba-df1a78059e4c","Type":"ContainerStarted","Data":"1ed23fa2a2860c6ebc16a3a067512b698088611b953ceac10893437cebb8b8e1"} Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.078782 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65c6dfc787-2z2x7"] Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.094718 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-65c6dfc787-2z2x7"] Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.156963 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-mhhlz" podStartSLOduration=3.156935248 podStartE2EDuration="3.156935248s" podCreationTimestamp="2025-12-01 07:10:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-01 07:10:48.143251554 +0000 UTC m=+1203.464059240" watchObservedRunningTime="2025-12-01 07:10:48.156935248 +0000 UTC m=+1203.477742934" Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.433368 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.539684 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.645900 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.971728 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bfdfa19-8696-4747-9d07-004881884560" path="/var/lib/kubelet/pods/7bfdfa19-8696-4747-9d07-004881884560/volumes" Dec 01 07:10:48 crc kubenswrapper[4822]: I1201 07:10:48.972982 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcdede93-c2d5-4e02-b96a-9ef61503819e" path="/var/lib/kubelet/pods/dcdede93-c2d5-4e02-b96a-9ef61503819e/volumes" Dec 01 07:10:49 crc kubenswrapper[4822]: I1201 07:10:49.104155 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1f9fc2d4-8719-4887-817a-5a75520483b6","Type":"ContainerStarted","Data":"2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170"} Dec 01 07:10:49 crc kubenswrapper[4822]: I1201 07:10:49.111227 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4df73001-6ac3-4080-ba58-a7779e8c03d1","Type":"ContainerStarted","Data":"526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5"} Dec 01 07:10:49 crc kubenswrapper[4822]: I1201 07:10:49.120799 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f645789c-s26qn" event={"ID":"d28f5670-3f16-470f-8a83-a1dcaca7bce4","Type":"ContainerStarted","Data":"c4aec2584ebe7a419b01daa88d824e0c5e5215037d3af31ab07615090bcb2dae"} Dec 01 07:10:49 crc kubenswrapper[4822]: I1201 07:10:49.120951 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:49 crc kubenswrapper[4822]: I1201 07:10:49.150345 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f645789c-s26qn" podStartSLOduration=4.150322814 podStartE2EDuration="4.150322814s" podCreationTimestamp="2025-12-01 07:10:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:10:49.148882023 +0000 UTC m=+1204.469689709" watchObservedRunningTime="2025-12-01 07:10:49.150322814 +0000 UTC m=+1204.471130510" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.145757 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1f9fc2d4-8719-4887-817a-5a75520483b6","Type":"ContainerStarted","Data":"bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201"} Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.146097 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="1f9fc2d4-8719-4887-817a-5a75520483b6" containerName="glance-log" containerID="cri-o://2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170" gracePeriod=30 Dec 01 07:10:50 crc kubenswrapper[4822]: 
I1201 07:10:50.146439 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="1f9fc2d4-8719-4887-817a-5a75520483b6" containerName="glance-httpd" containerID="cri-o://bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201" gracePeriod=30 Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.150917 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4df73001-6ac3-4080-ba58-a7779e8c03d1","Type":"ContainerStarted","Data":"1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5"} Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.150980 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4df73001-6ac3-4080-ba58-a7779e8c03d1" containerName="glance-log" containerID="cri-o://526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5" gracePeriod=30 Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.151037 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4df73001-6ac3-4080-ba58-a7779e8c03d1" containerName="glance-httpd" containerID="cri-o://1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5" gracePeriod=30 Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.181816 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.181784869 podStartE2EDuration="5.181784869s" podCreationTimestamp="2025-12-01 07:10:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:10:50.167657942 +0000 UTC m=+1205.488465648" watchObservedRunningTime="2025-12-01 07:10:50.181784869 +0000 UTC m=+1205.502592555" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.202093 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.202072739 podStartE2EDuration="5.202072739s" podCreationTimestamp="2025-12-01 07:10:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:10:50.19642039 +0000 UTC m=+1205.517228066" watchObservedRunningTime="2025-12-01 07:10:50.202072739 +0000 UTC m=+1205.522880415" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.777403 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.862597 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4df73001-6ac3-4080-ba58-a7779e8c03d1-httpd-run\") pod \"4df73001-6ac3-4080-ba58-a7779e8c03d1\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.862656 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4df73001-6ac3-4080-ba58-a7779e8c03d1-logs\") pod \"4df73001-6ac3-4080-ba58-a7779e8c03d1\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.862739 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-combined-ca-bundle\") pod \"4df73001-6ac3-4080-ba58-a7779e8c03d1\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.862924 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chsdl\" (UniqueName: \"kubernetes.io/projected/4df73001-6ac3-4080-ba58-a7779e8c03d1-kube-api-access-chsdl\") pod \"4df73001-6ac3-4080-ba58-a7779e8c03d1\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.862965 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-config-data\") pod \"4df73001-6ac3-4080-ba58-a7779e8c03d1\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.863001 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"4df73001-6ac3-4080-ba58-a7779e8c03d1\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.863024 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-scripts\") pod \"4df73001-6ac3-4080-ba58-a7779e8c03d1\" (UID: \"4df73001-6ac3-4080-ba58-a7779e8c03d1\") " Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.863164 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4df73001-6ac3-4080-ba58-a7779e8c03d1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4df73001-6ac3-4080-ba58-a7779e8c03d1" (UID: "4df73001-6ac3-4080-ba58-a7779e8c03d1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.863181 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4df73001-6ac3-4080-ba58-a7779e8c03d1-logs" (OuterVolumeSpecName: "logs") pod "4df73001-6ac3-4080-ba58-a7779e8c03d1" (UID: "4df73001-6ac3-4080-ba58-a7779e8c03d1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.863587 4822 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4df73001-6ac3-4080-ba58-a7779e8c03d1-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.863609 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4df73001-6ac3-4080-ba58-a7779e8c03d1-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.872268 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-scripts" (OuterVolumeSpecName: "scripts") pod "4df73001-6ac3-4080-ba58-a7779e8c03d1" (UID: "4df73001-6ac3-4080-ba58-a7779e8c03d1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.875920 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "4df73001-6ac3-4080-ba58-a7779e8c03d1" (UID: "4df73001-6ac3-4080-ba58-a7779e8c03d1"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.876075 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4df73001-6ac3-4080-ba58-a7779e8c03d1-kube-api-access-chsdl" (OuterVolumeSpecName: "kube-api-access-chsdl") pod "4df73001-6ac3-4080-ba58-a7779e8c03d1" (UID: "4df73001-6ac3-4080-ba58-a7779e8c03d1"). InnerVolumeSpecName "kube-api-access-chsdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.940470 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-config-data" (OuterVolumeSpecName: "config-data") pod "4df73001-6ac3-4080-ba58-a7779e8c03d1" (UID: "4df73001-6ac3-4080-ba58-a7779e8c03d1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.940962 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.956176 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4df73001-6ac3-4080-ba58-a7779e8c03d1" (UID: "4df73001-6ac3-4080-ba58-a7779e8c03d1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.966054 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chsdl\" (UniqueName: \"kubernetes.io/projected/4df73001-6ac3-4080-ba58-a7779e8c03d1-kube-api-access-chsdl\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.966693 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.966848 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.966923 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.966975 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df73001-6ac3-4080-ba58-a7779e8c03d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:50 crc kubenswrapper[4822]: I1201 07:10:50.983738 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.068291 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-scripts\") pod \"1f9fc2d4-8719-4887-817a-5a75520483b6\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.068365 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-combined-ca-bundle\") pod \"1f9fc2d4-8719-4887-817a-5a75520483b6\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.068426 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"1f9fc2d4-8719-4887-817a-5a75520483b6\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.068541 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-config-data\") pod \"1f9fc2d4-8719-4887-817a-5a75520483b6\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.068595 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f9fc2d4-8719-4887-817a-5a75520483b6-httpd-run\") pod \"1f9fc2d4-8719-4887-817a-5a75520483b6\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.068638 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg2hh\" (UniqueName: 
\"kubernetes.io/projected/1f9fc2d4-8719-4887-817a-5a75520483b6-kube-api-access-qg2hh\") pod \"1f9fc2d4-8719-4887-817a-5a75520483b6\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.068672 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9fc2d4-8719-4887-817a-5a75520483b6-logs\") pod \"1f9fc2d4-8719-4887-817a-5a75520483b6\" (UID: \"1f9fc2d4-8719-4887-817a-5a75520483b6\") " Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.069366 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f9fc2d4-8719-4887-817a-5a75520483b6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "1f9fc2d4-8719-4887-817a-5a75520483b6" (UID: "1f9fc2d4-8719-4887-817a-5a75520483b6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.069698 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f9fc2d4-8719-4887-817a-5a75520483b6-logs" (OuterVolumeSpecName: "logs") pod "1f9fc2d4-8719-4887-817a-5a75520483b6" (UID: "1f9fc2d4-8719-4887-817a-5a75520483b6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.070127 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.070144 4822 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f9fc2d4-8719-4887-817a-5a75520483b6-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.070157 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9fc2d4-8719-4887-817a-5a75520483b6-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.073436 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f9fc2d4-8719-4887-817a-5a75520483b6-kube-api-access-qg2hh" (OuterVolumeSpecName: "kube-api-access-qg2hh") pod "1f9fc2d4-8719-4887-817a-5a75520483b6" (UID: "1f9fc2d4-8719-4887-817a-5a75520483b6"). InnerVolumeSpecName "kube-api-access-qg2hh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.073969 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "1f9fc2d4-8719-4887-817a-5a75520483b6" (UID: "1f9fc2d4-8719-4887-817a-5a75520483b6"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.077521 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-scripts" (OuterVolumeSpecName: "scripts") pod "1f9fc2d4-8719-4887-817a-5a75520483b6" (UID: "1f9fc2d4-8719-4887-817a-5a75520483b6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.100885 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1f9fc2d4-8719-4887-817a-5a75520483b6" (UID: "1f9fc2d4-8719-4887-817a-5a75520483b6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.132494 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-config-data" (OuterVolumeSpecName: "config-data") pod "1f9fc2d4-8719-4887-817a-5a75520483b6" (UID: "1f9fc2d4-8719-4887-817a-5a75520483b6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.164119 4822 generic.go:334] "Generic (PLEG): container finished" podID="1f9fc2d4-8719-4887-817a-5a75520483b6" containerID="bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201" exitCode=143 Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.164171 4822 generic.go:334] "Generic (PLEG): container finished" podID="1f9fc2d4-8719-4887-817a-5a75520483b6" containerID="2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170" exitCode=143 Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.164211 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.164269 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1f9fc2d4-8719-4887-817a-5a75520483b6","Type":"ContainerDied","Data":"bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201"} Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.164307 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1f9fc2d4-8719-4887-817a-5a75520483b6","Type":"ContainerDied","Data":"2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170"} Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.164322 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1f9fc2d4-8719-4887-817a-5a75520483b6","Type":"ContainerDied","Data":"aca8846ef82543612ac9fdb05c8e98af46a63ab89687eb7bc2a01e1974dcb331"} Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.164343 4822 scope.go:117] "RemoveContainer" containerID="bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.166693 4822 generic.go:334] "Generic (PLEG): container finished" podID="4df73001-6ac3-4080-ba58-a7779e8c03d1" containerID="1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5" exitCode=143 Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.166725 4822 generic.go:334] "Generic (PLEG): container finished" podID="4df73001-6ac3-4080-ba58-a7779e8c03d1" containerID="526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5" exitCode=143 Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.166765 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"4df73001-6ac3-4080-ba58-a7779e8c03d1","Type":"ContainerDied","Data":"1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5"} Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.166791 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4df73001-6ac3-4080-ba58-a7779e8c03d1","Type":"ContainerDied","Data":"526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5"} Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.166806 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4df73001-6ac3-4080-ba58-a7779e8c03d1","Type":"ContainerDied","Data":"6107dfc5d255278028ae4da177715f93ad20069a2d58ec9264a624af6549daa8"} Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.166878 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.170386 4822 generic.go:334] "Generic (PLEG): container finished" podID="112e181f-4b31-4ac6-b92c-0501e0fdfe1d" containerID="52174cf0388de8aa36102ca3e1e1ec5cee4d830be4c181aed635624923e8d0e1" exitCode=0 Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.170430 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cdbjg" event={"ID":"112e181f-4b31-4ac6-b92c-0501e0fdfe1d","Type":"ContainerDied","Data":"52174cf0388de8aa36102ca3e1e1ec5cee4d830be4c181aed635624923e8d0e1"} Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.171743 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.171785 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg2hh\" (UniqueName: \"kubernetes.io/projected/1f9fc2d4-8719-4887-817a-5a75520483b6-kube-api-access-qg2hh\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.171799 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.171812 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9fc2d4-8719-4887-817a-5a75520483b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.171849 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.189428 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.246285 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.268480 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.275354 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.302318 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.309943 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:10:51 crc kubenswrapper[4822]: E1201 07:10:51.310670 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcdede93-c2d5-4e02-b96a-9ef61503819e" containerName="dnsmasq-dns" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.310694 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcdede93-c2d5-4e02-b96a-9ef61503819e" containerName="dnsmasq-dns" Dec 01 07:10:51 crc kubenswrapper[4822]: E1201 07:10:51.310708 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f9fc2d4-8719-4887-817a-5a75520483b6" containerName="glance-httpd" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.310718 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f9fc2d4-8719-4887-817a-5a75520483b6" containerName="glance-httpd" Dec 01 07:10:51 crc kubenswrapper[4822]: E1201 07:10:51.310741 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4df73001-6ac3-4080-ba58-a7779e8c03d1" containerName="glance-log" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.310749 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4df73001-6ac3-4080-ba58-a7779e8c03d1" containerName="glance-log" Dec 01 07:10:51 crc kubenswrapper[4822]: E1201 07:10:51.310767 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bfdfa19-8696-4747-9d07-004881884560" containerName="init" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.310776 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bfdfa19-8696-4747-9d07-004881884560" containerName="init" Dec 01 07:10:51 crc kubenswrapper[4822]: E1201 07:10:51.310792 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcdede93-c2d5-4e02-b96a-9ef61503819e" containerName="init" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.310799 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcdede93-c2d5-4e02-b96a-9ef61503819e" containerName="init" Dec 01 07:10:51 crc kubenswrapper[4822]: E1201 07:10:51.310812 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f9fc2d4-8719-4887-817a-5a75520483b6" containerName="glance-log" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.310820 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f9fc2d4-8719-4887-817a-5a75520483b6" containerName="glance-log" Dec 01 07:10:51 crc kubenswrapper[4822]: E1201 07:10:51.310836 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4df73001-6ac3-4080-ba58-a7779e8c03d1" containerName="glance-httpd" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.310845 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4df73001-6ac3-4080-ba58-a7779e8c03d1" containerName="glance-httpd" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.311049 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4df73001-6ac3-4080-ba58-a7779e8c03d1" containerName="glance-httpd" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.311071 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bfdfa19-8696-4747-9d07-004881884560" containerName="init" Dec 01 07:10:51 crc kubenswrapper[4822]: 
I1201 07:10:51.311081 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f9fc2d4-8719-4887-817a-5a75520483b6" containerName="glance-httpd" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.311098 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcdede93-c2d5-4e02-b96a-9ef61503819e" containerName="dnsmasq-dns" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.311113 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f9fc2d4-8719-4887-817a-5a75520483b6" containerName="glance-log" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.311122 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4df73001-6ac3-4080-ba58-a7779e8c03d1" containerName="glance-log" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.312036 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.315499 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.315920 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-zzv5d" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.316584 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.338242 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.349969 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.365304 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.366781 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.376813 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.378918 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.386094 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-config-data\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.386195 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43dea7cf-bd0b-475b-94ec-12a1a141b803-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.386384 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.386460 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-scripts\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.386561 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.386705 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43dea7cf-bd0b-475b-94ec-12a1a141b803-logs\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.386727 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r9kd\" (UniqueName: \"kubernetes.io/projected/43dea7cf-bd0b-475b-94ec-12a1a141b803-kube-api-access-4r9kd\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.489111 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: 
\"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.489211 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.489279 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfj42\" (UniqueName: \"kubernetes.io/projected/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-kube-api-access-qfj42\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.489305 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43dea7cf-bd0b-475b-94ec-12a1a141b803-logs\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.489406 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r9kd\" (UniqueName: \"kubernetes.io/projected/43dea7cf-bd0b-475b-94ec-12a1a141b803-kube-api-access-4r9kd\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.489524 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-config-data\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.489601 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43dea7cf-bd0b-475b-94ec-12a1a141b803-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.489742 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.489827 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.489871 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: 
\"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.489892 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-scripts\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.490180 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43dea7cf-bd0b-475b-94ec-12a1a141b803-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.490212 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43dea7cf-bd0b-475b-94ec-12a1a141b803-logs\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.490285 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.491565 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.491619 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-logs\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.491708 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.495098 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-config-data\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.495597 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-scripts\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 
07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.504073 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.506784 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r9kd\" (UniqueName: \"kubernetes.io/projected/43dea7cf-bd0b-475b-94ec-12a1a141b803-kube-api-access-4r9kd\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.515955 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.595921 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfj42\" (UniqueName: \"kubernetes.io/projected/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-kube-api-access-qfj42\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.596079 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.596120 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.596151 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.596175 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-logs\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.596210 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.596254 4822 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.598691 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.602155 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-logs\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.604444 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.604982 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.610126 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.628285 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.635457 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfj42\" (UniqueName: \"kubernetes.io/projected/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-kube-api-access-qfj42\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.678050 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.683785 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:10:51 crc kubenswrapper[4822]: I1201 07:10:51.704530 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:10:52 crc kubenswrapper[4822]: I1201 07:10:52.402535 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:10:52 crc kubenswrapper[4822]: I1201 07:10:52.500876 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:10:52 crc kubenswrapper[4822]: I1201 07:10:52.963582 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f9fc2d4-8719-4887-817a-5a75520483b6" path="/var/lib/kubelet/pods/1f9fc2d4-8719-4887-817a-5a75520483b6/volumes" Dec 01 07:10:52 crc kubenswrapper[4822]: I1201 07:10:52.964447 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4df73001-6ac3-4080-ba58-a7779e8c03d1" path="/var/lib/kubelet/pods/4df73001-6ac3-4080-ba58-a7779e8c03d1/volumes" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.012004 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cdbjg" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.111067 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-scripts\") pod \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.111331 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-combined-ca-bundle\") pod \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.111409 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6sl7\" (UniqueName: \"kubernetes.io/projected/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-kube-api-access-h6sl7\") pod \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.111465 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-credential-keys\") pod \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.111575 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-config-data\") pod \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.111673 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" 
(UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-fernet-keys\") pod \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\" (UID: \"112e181f-4b31-4ac6-b92c-0501e0fdfe1d\") " Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.121708 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "112e181f-4b31-4ac6-b92c-0501e0fdfe1d" (UID: "112e181f-4b31-4ac6-b92c-0501e0fdfe1d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.121753 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "112e181f-4b31-4ac6-b92c-0501e0fdfe1d" (UID: "112e181f-4b31-4ac6-b92c-0501e0fdfe1d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.126808 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-kube-api-access-h6sl7" (OuterVolumeSpecName: "kube-api-access-h6sl7") pod "112e181f-4b31-4ac6-b92c-0501e0fdfe1d" (UID: "112e181f-4b31-4ac6-b92c-0501e0fdfe1d"). InnerVolumeSpecName "kube-api-access-h6sl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.126936 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-scripts" (OuterVolumeSpecName: "scripts") pod "112e181f-4b31-4ac6-b92c-0501e0fdfe1d" (UID: "112e181f-4b31-4ac6-b92c-0501e0fdfe1d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.138644 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "112e181f-4b31-4ac6-b92c-0501e0fdfe1d" (UID: "112e181f-4b31-4ac6-b92c-0501e0fdfe1d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.140797 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-config-data" (OuterVolumeSpecName: "config-data") pod "112e181f-4b31-4ac6-b92c-0501e0fdfe1d" (UID: "112e181f-4b31-4ac6-b92c-0501e0fdfe1d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.212147 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cdbjg" event={"ID":"112e181f-4b31-4ac6-b92c-0501e0fdfe1d","Type":"ContainerDied","Data":"ff85fa0baf44177d7eb52585e3255a259108844c6679a54aa32f0031d2c9392d"} Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.212209 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff85fa0baf44177d7eb52585e3255a259108844c6679a54aa32f0031d2c9392d" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.212288 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-cdbjg" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.220244 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.220280 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6sl7\" (UniqueName: \"kubernetes.io/projected/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-kube-api-access-h6sl7\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.220292 4822 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.220301 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.220310 4822 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:55 crc kubenswrapper[4822]: I1201 07:10:55.220319 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/112e181f-4b31-4ac6-b92c-0501e0fdfe1d-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.010674 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.080497 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f6d79597f-nhz8d"] Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.080860 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" podUID="de738eee-1f47-42ae-be49-a65d2fe8ea3e" containerName="dnsmasq-dns" containerID="cri-o://75bccd05fdf56db9e946daf05d599a452eb287174556653e7d9bb6d54491f25b" gracePeriod=10 Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.159079 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-cdbjg"] Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.168904 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-cdbjg"] Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.240363 4822 generic.go:334] "Generic (PLEG): container finished" podID="de738eee-1f47-42ae-be49-a65d2fe8ea3e" containerID="75bccd05fdf56db9e946daf05d599a452eb287174556653e7d9bb6d54491f25b" exitCode=0 Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.240409 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" event={"ID":"de738eee-1f47-42ae-be49-a65d2fe8ea3e","Type":"ContainerDied","Data":"75bccd05fdf56db9e946daf05d599a452eb287174556653e7d9bb6d54491f25b"} Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.246678 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-t6rkn"] Dec 01 07:10:56 crc kubenswrapper[4822]: E1201 07:10:56.247139 4822 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="112e181f-4b31-4ac6-b92c-0501e0fdfe1d" containerName="keystone-bootstrap" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.247156 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="112e181f-4b31-4ac6-b92c-0501e0fdfe1d" containerName="keystone-bootstrap" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.247338 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="112e181f-4b31-4ac6-b92c-0501e0fdfe1d" containerName="keystone-bootstrap" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.251969 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.254154 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.256432 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.256756 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.257021 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.257152 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-dcwzn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.262351 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-t6rkn"] Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.342725 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-scripts\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.342799 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-config-data\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.342953 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh5cc\" (UniqueName: \"kubernetes.io/projected/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-kube-api-access-bh5cc\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.343246 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-credential-keys\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.343382 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-fernet-keys\") pod \"keystone-bootstrap-t6rkn\" (UID: 
\"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.343444 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-combined-ca-bundle\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.446369 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-fernet-keys\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.446897 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-combined-ca-bundle\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.447139 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-scripts\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.447235 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-config-data\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.447385 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh5cc\" (UniqueName: \"kubernetes.io/projected/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-kube-api-access-bh5cc\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.447523 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-credential-keys\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.452437 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-config-data\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.454988 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-fernet-keys\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.455656 4822 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-scripts\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.455911 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-credential-keys\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.457243 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-combined-ca-bundle\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.468530 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh5cc\" (UniqueName: \"kubernetes.io/projected/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-kube-api-access-bh5cc\") pod \"keystone-bootstrap-t6rkn\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.586887 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:10:56 crc kubenswrapper[4822]: I1201 07:10:56.975416 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="112e181f-4b31-4ac6-b92c-0501e0fdfe1d" path="/var/lib/kubelet/pods/112e181f-4b31-4ac6-b92c-0501e0fdfe1d/volumes" Dec 01 07:10:58 crc kubenswrapper[4822]: I1201 07:10:58.827008 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" podUID="de738eee-1f47-42ae-be49-a65d2fe8ea3e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 01 07:10:59 crc kubenswrapper[4822]: I1201 07:10:59.143230 4822 scope.go:117] "RemoveContainer" containerID="2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170" Dec 01 07:10:59 crc kubenswrapper[4822]: I1201 07:10:59.776783 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:11:05 crc kubenswrapper[4822]: I1201 07:11:05.338511 4822 generic.go:334] "Generic (PLEG): container finished" podID="eb723787-e103-4feb-9aba-df1a78059e4c" containerID="977351d0eec5eede4e16b4c71c3f7da7b277de2618a05bde7b6e5dd4825cbde2" exitCode=0 Dec 01 07:11:05 crc kubenswrapper[4822]: I1201 07:11:05.338667 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-mhhlz" event={"ID":"eb723787-e103-4feb-9aba-df1a78059e4c","Type":"ContainerDied","Data":"977351d0eec5eede4e16b4c71c3f7da7b277de2618a05bde7b6e5dd4825cbde2"} Dec 01 07:11:08 crc kubenswrapper[4822]: E1201 07:11:08.129733 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:2051e26a441f1ce22aeb8daa0137559d89bded994db8141c11dd580ae6d07a23" Dec 01 07:11:08 crc kubenswrapper[4822]: E1201 07:11:08.130431 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:2051e26a441f1ce22aeb8daa0137559d89bded994db8141c11dd580ae6d07a23,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n576h66ch687h664h565h559h54fhc9h5c7h8chbdhc8hb6h58fh5fchc7h77h596h75h7fh5ddh5cfh687h5fh664h57h598h579hfch685h7bhd9q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ldk4q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(c8f5b07d-19c7-4b1a-90a9-6b19bb76e162): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:11:08 crc kubenswrapper[4822]: E1201 07:11:08.669691 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:3a56b50437a0c9a9a7b30c10f5e43bbdb7d9a94b723c70d36f0b01ff545e00eb" Dec 01 07:11:08 crc kubenswrapper[4822]: E1201 07:11:08.669859 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:3a56b50437a0c9a9a7b30c10f5e43bbdb7d9a94b723c70d36f0b01ff545e00eb,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mrrkq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-zndwr_openstack(b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:11:08 crc kubenswrapper[4822]: E1201 07:11:08.671314 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-zndwr" podUID="b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0" Dec 01 07:11:08 crc kubenswrapper[4822]: W1201 07:11:08.781769 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43dea7cf_bd0b_475b_94ec_12a1a141b803.slice/crio-4195fa6256a0d69dc1d1132965a8c64f7e855569004f9aa81e4e25a96e0516af WatchSource:0}: Error finding container 4195fa6256a0d69dc1d1132965a8c64f7e855569004f9aa81e4e25a96e0516af: Status 404 returned error can't find the container with id 4195fa6256a0d69dc1d1132965a8c64f7e855569004f9aa81e4e25a96e0516af Dec 01 07:11:08 crc kubenswrapper[4822]: I1201 07:11:08.827733 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" podUID="de738eee-1f47-42ae-be49-a65d2fe8ea3e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: i/o timeout" Dec 01 07:11:08 crc kubenswrapper[4822]: I1201 07:11:08.896334 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-mhhlz" Dec 01 07:11:08 crc kubenswrapper[4822]: I1201 07:11:08.900710 4822 util.go:48] "No ready sandbox for pod can be found. 
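The two "Unhandled Error" entries above are one failure reported at three layers: the CRI PullImage call fails with "context canceled", kuberuntime dumps the full Container spec it was trying to start, and pod_workers records the resulting ErrImagePull for the pod. A minimal sketch of surfacing pods stuck in this state with the official kubernetes Python client; the "openstack" namespace comes from the log, everything else is standard client usage rather than something the log shows:

    # List containers waiting on image pulls, as in the ErrImagePull /
    # ImagePullBackOff entries around this point in the log.
    from kubernetes import client, config

    config.load_kube_config()  # use load_incluster_config() when run in-cluster
    v1 = client.CoreV1Api()
    for pod in v1.list_namespaced_pod("openstack").items:
        for cs in pod.status.container_statuses or []:
            w = cs.state.waiting
            if w and w.reason in ("ErrImagePull", "ImagePullBackOff"):
                print(pod.metadata.name, cs.name, w.reason, w.message)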
Need to start a new one" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.022865 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9zpd\" (UniqueName: \"kubernetes.io/projected/de738eee-1f47-42ae-be49-a65d2fe8ea3e-kube-api-access-x9zpd\") pod \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.022912 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-ovsdbserver-nb\") pod \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.023033 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kz8lf\" (UniqueName: \"kubernetes.io/projected/eb723787-e103-4feb-9aba-df1a78059e4c-kube-api-access-kz8lf\") pod \"eb723787-e103-4feb-9aba-df1a78059e4c\" (UID: \"eb723787-e103-4feb-9aba-df1a78059e4c\") " Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.024296 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-dns-svc\") pod \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.024389 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/eb723787-e103-4feb-9aba-df1a78059e4c-config\") pod \"eb723787-e103-4feb-9aba-df1a78059e4c\" (UID: \"eb723787-e103-4feb-9aba-df1a78059e4c\") " Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.024542 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-ovsdbserver-sb\") pod \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.024678 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb723787-e103-4feb-9aba-df1a78059e4c-combined-ca-bundle\") pod \"eb723787-e103-4feb-9aba-df1a78059e4c\" (UID: \"eb723787-e103-4feb-9aba-df1a78059e4c\") " Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.025041 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-config\") pod \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\" (UID: \"de738eee-1f47-42ae-be49-a65d2fe8ea3e\") " Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.029073 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de738eee-1f47-42ae-be49-a65d2fe8ea3e-kube-api-access-x9zpd" (OuterVolumeSpecName: "kube-api-access-x9zpd") pod "de738eee-1f47-42ae-be49-a65d2fe8ea3e" (UID: "de738eee-1f47-42ae-be49-a65d2fe8ea3e"). InnerVolumeSpecName "kube-api-access-x9zpd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.029299 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb723787-e103-4feb-9aba-df1a78059e4c-kube-api-access-kz8lf" (OuterVolumeSpecName: "kube-api-access-kz8lf") pod "eb723787-e103-4feb-9aba-df1a78059e4c" (UID: "eb723787-e103-4feb-9aba-df1a78059e4c"). InnerVolumeSpecName "kube-api-access-kz8lf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.049840 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb723787-e103-4feb-9aba-df1a78059e4c-config" (OuterVolumeSpecName: "config") pod "eb723787-e103-4feb-9aba-df1a78059e4c" (UID: "eb723787-e103-4feb-9aba-df1a78059e4c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.065516 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb723787-e103-4feb-9aba-df1a78059e4c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb723787-e103-4feb-9aba-df1a78059e4c" (UID: "eb723787-e103-4feb-9aba-df1a78059e4c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.079053 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "de738eee-1f47-42ae-be49-a65d2fe8ea3e" (UID: "de738eee-1f47-42ae-be49-a65d2fe8ea3e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.082048 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-config" (OuterVolumeSpecName: "config") pod "de738eee-1f47-42ae-be49-a65d2fe8ea3e" (UID: "de738eee-1f47-42ae-be49-a65d2fe8ea3e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.093480 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "de738eee-1f47-42ae-be49-a65d2fe8ea3e" (UID: "de738eee-1f47-42ae-be49-a65d2fe8ea3e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.095376 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "de738eee-1f47-42ae-be49-a65d2fe8ea3e" (UID: "de738eee-1f47-42ae-be49-a65d2fe8ea3e"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.133075 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.133123 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb723787-e103-4feb-9aba-df1a78059e4c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.133137 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.133149 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9zpd\" (UniqueName: \"kubernetes.io/projected/de738eee-1f47-42ae-be49-a65d2fe8ea3e-kube-api-access-x9zpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.133164 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.133177 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kz8lf\" (UniqueName: \"kubernetes.io/projected/eb723787-e103-4feb-9aba-df1a78059e4c-kube-api-access-kz8lf\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.133190 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de738eee-1f47-42ae-be49-a65d2fe8ea3e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.133199 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/eb723787-e103-4feb-9aba-df1a78059e4c-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.390966 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" event={"ID":"de738eee-1f47-42ae-be49-a65d2fe8ea3e","Type":"ContainerDied","Data":"90f49e6fd14e9fa620ef061317805d458724e6f50c007be364fe1ff073490986"} Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.391011 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.392027 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"43dea7cf-bd0b-475b-94ec-12a1a141b803","Type":"ContainerStarted","Data":"4195fa6256a0d69dc1d1132965a8c64f7e855569004f9aa81e4e25a96e0516af"} Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.393695 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-mhhlz" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.396614 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-mhhlz" event={"ID":"eb723787-e103-4feb-9aba-df1a78059e4c","Type":"ContainerDied","Data":"1ed23fa2a2860c6ebc16a3a067512b698088611b953ceac10893437cebb8b8e1"} Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.396645 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ed23fa2a2860c6ebc16a3a067512b698088611b953ceac10893437cebb8b8e1" Dec 01 07:11:09 crc kubenswrapper[4822]: E1201 07:11:09.397268 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:3a56b50437a0c9a9a7b30c10f5e43bbdb7d9a94b723c70d36f0b01ff545e00eb\\\"\"" pod="openstack/barbican-db-sync-zndwr" podUID="b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0" Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.448019 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f6d79597f-nhz8d"] Dec 01 07:11:09 crc kubenswrapper[4822]: I1201 07:11:09.461246 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f6d79597f-nhz8d"] Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.070831 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f677dd449-mnlmw"] Dec 01 07:11:10 crc kubenswrapper[4822]: E1201 07:11:10.071458 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de738eee-1f47-42ae-be49-a65d2fe8ea3e" containerName="init" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.071475 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="de738eee-1f47-42ae-be49-a65d2fe8ea3e" containerName="init" Dec 01 07:11:10 crc kubenswrapper[4822]: E1201 07:11:10.071512 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de738eee-1f47-42ae-be49-a65d2fe8ea3e" containerName="dnsmasq-dns" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.071519 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="de738eee-1f47-42ae-be49-a65d2fe8ea3e" containerName="dnsmasq-dns" Dec 01 07:11:10 crc kubenswrapper[4822]: E1201 07:11:10.071531 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb723787-e103-4feb-9aba-df1a78059e4c" containerName="neutron-db-sync" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.071537 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb723787-e103-4feb-9aba-df1a78059e4c" containerName="neutron-db-sync" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.071711 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb723787-e103-4feb-9aba-df1a78059e4c" containerName="neutron-db-sync" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.071729 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="de738eee-1f47-42ae-be49-a65d2fe8ea3e" containerName="dnsmasq-dns" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.072612 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.100419 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f677dd449-mnlmw"] Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.149850 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-config\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.149894 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sztjs\" (UniqueName: \"kubernetes.io/projected/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-kube-api-access-sztjs\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.149926 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-ovsdbserver-nb\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.150003 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-ovsdbserver-sb\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.150024 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-dns-svc\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.150047 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-dns-swift-storage-0\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.176537 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-64649776b-r955c"] Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.178415 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.184359 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.184617 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.184858 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.185683 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-8bxmm" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.206888 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-64649776b-r955c"] Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.251380 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6skdb\" (UniqueName: \"kubernetes.io/projected/082bea40-1a34-4711-8c74-dbf325eb5658-kube-api-access-6skdb\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.251429 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-config\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.251469 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-config\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.251488 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sztjs\" (UniqueName: \"kubernetes.io/projected/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-kube-api-access-sztjs\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.251512 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-ovsdbserver-nb\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.251707 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-ovndb-tls-certs\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.251801 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-httpd-config\") pod \"neutron-64649776b-r955c\" (UID: 
\"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.251916 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-ovsdbserver-sb\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.251978 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-dns-svc\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.252007 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-dns-swift-storage-0\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.252044 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-combined-ca-bundle\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.252393 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-ovsdbserver-nb\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.252848 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-config\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.252851 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-ovsdbserver-sb\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.253109 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-dns-swift-storage-0\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.253165 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-dns-svc\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc 
kubenswrapper[4822]: I1201 07:11:10.290189 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sztjs\" (UniqueName: \"kubernetes.io/projected/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-kube-api-access-sztjs\") pod \"dnsmasq-dns-7f677dd449-mnlmw\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.354957 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-ovndb-tls-certs\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.355004 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-httpd-config\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.355053 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-combined-ca-bundle\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.355107 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6skdb\" (UniqueName: \"kubernetes.io/projected/082bea40-1a34-4711-8c74-dbf325eb5658-kube-api-access-6skdb\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.355133 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-config\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.359542 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-ovndb-tls-certs\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.361934 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-config\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.369386 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-combined-ca-bundle\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.372958 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-httpd-config\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.375713 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6skdb\" (UniqueName: \"kubernetes.io/projected/082bea40-1a34-4711-8c74-dbf325eb5658-kube-api-access-6skdb\") pod \"neutron-64649776b-r955c\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.403966 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.506081 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.796508 4822 scope.go:117] "RemoveContainer" containerID="bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201" Dec 01 07:11:10 crc kubenswrapper[4822]: E1201 07:11:10.796897 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201\": container with ID starting with bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201 not found: ID does not exist" containerID="bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.796973 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201"} err="failed to get container status \"bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201\": rpc error: code = NotFound desc = could not find container \"bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201\": container with ID starting with bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201 not found: ID does not exist" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.796996 4822 scope.go:117] "RemoveContainer" containerID="2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170" Dec 01 07:11:10 crc kubenswrapper[4822]: E1201 07:11:10.797512 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170\": container with ID starting with 2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170 not found: ID does not exist" containerID="2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.797536 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170"} err="failed to get container status \"2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170\": rpc error: code = NotFound desc = could not find container \"2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170\": container with ID starting with 2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170 not found: ID does not exist" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.797623 4822 scope.go:117] "RemoveContainer" 
containerID="bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.797870 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201"} err="failed to get container status \"bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201\": rpc error: code = NotFound desc = could not find container \"bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201\": container with ID starting with bbb387d17621bd9874ed27cd036058a2fd4e5dac9ab206c1bac9ecc0c5b83201 not found: ID does not exist" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.797896 4822 scope.go:117] "RemoveContainer" containerID="2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.798106 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170"} err="failed to get container status \"2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170\": rpc error: code = NotFound desc = could not find container \"2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170\": container with ID starting with 2e004594f38cd1980c1ddab556b538c24ee66771872471d61ca2abbaa3fbb170 not found: ID does not exist" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.798135 4822 scope.go:117] "RemoveContainer" containerID="1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5" Dec 01 07:11:10 crc kubenswrapper[4822]: E1201 07:11:10.839806 4822 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b5266c9a26766fce2b92f95dff52d362a760f7baf1474cdcb33bd68570e096c0" Dec 01 07:11:10 crc kubenswrapper[4822]: E1201 07:11:10.840111 4822 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b5266c9a26766fce2b92f95dff52d362a760f7baf1474cdcb33bd68570e096c0,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-78cv6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-7wprf_openstack(ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:11:10 crc kubenswrapper[4822]: E1201 07:11:10.841706 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-7wprf" podUID="ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" Dec 01 07:11:10 crc kubenswrapper[4822]: I1201 07:11:10.975075 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de738eee-1f47-42ae-be49-a65d2fe8ea3e" path="/var/lib/kubelet/pods/de738eee-1f47-42ae-be49-a65d2fe8ea3e/volumes" Dec 01 07:11:11 crc kubenswrapper[4822]: E1201 07:11:11.440931 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b5266c9a26766fce2b92f95dff52d362a760f7baf1474cdcb33bd68570e096c0\\\"\"" pod="openstack/cinder-db-sync-7wprf" podUID="ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" Dec 01 07:11:11 crc kubenswrapper[4822]: I1201 07:11:11.595945 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/glance-default-internal-api-0"] Dec 01 07:11:11 crc kubenswrapper[4822]: I1201 07:11:11.611184 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-t6rkn"] Dec 01 07:11:11 crc kubenswrapper[4822]: W1201 07:11:11.744583 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ddaa4a4_d5dc_4046_89b8_6a335bfd6e77.slice/crio-dcf859362264e8e9d5b8d16a396d58a5c38bccf0acf48fcc630839a3b57cb3aa WatchSource:0}: Error finding container dcf859362264e8e9d5b8d16a396d58a5c38bccf0acf48fcc630839a3b57cb3aa: Status 404 returned error can't find the container with id dcf859362264e8e9d5b8d16a396d58a5c38bccf0acf48fcc630839a3b57cb3aa Dec 01 07:11:11 crc kubenswrapper[4822]: I1201 07:11:11.783089 4822 scope.go:117] "RemoveContainer" containerID="526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5" Dec 01 07:11:11 crc kubenswrapper[4822]: I1201 07:11:11.959968 4822 scope.go:117] "RemoveContainer" containerID="1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5" Dec 01 07:11:11 crc kubenswrapper[4822]: E1201 07:11:11.961482 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5\": container with ID starting with 1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5 not found: ID does not exist" containerID="1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5" Dec 01 07:11:11 crc kubenswrapper[4822]: I1201 07:11:11.961563 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5"} err="failed to get container status \"1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5\": rpc error: code = NotFound desc = could not find container \"1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5\": container with ID starting with 1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5 not found: ID does not exist" Dec 01 07:11:11 crc kubenswrapper[4822]: I1201 07:11:11.961619 4822 scope.go:117] "RemoveContainer" containerID="526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5" Dec 01 07:11:11 crc kubenswrapper[4822]: E1201 07:11:11.968731 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5\": container with ID starting with 526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5 not found: ID does not exist" containerID="526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5" Dec 01 07:11:11 crc kubenswrapper[4822]: I1201 07:11:11.968789 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5"} err="failed to get container status \"526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5\": rpc error: code = NotFound desc = could not find container \"526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5\": container with ID starting with 526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5 not found: ID does not exist" Dec 01 07:11:11 crc kubenswrapper[4822]: I1201 07:11:11.968821 4822 scope.go:117] "RemoveContainer" 
containerID="1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5" Dec 01 07:11:11 crc kubenswrapper[4822]: I1201 07:11:11.970004 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5"} err="failed to get container status \"1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5\": rpc error: code = NotFound desc = could not find container \"1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5\": container with ID starting with 1d4afa7ee1e928f1ac34c768cdf410320c8ac5840c8899c05aa3016b68934bd5 not found: ID does not exist" Dec 01 07:11:11 crc kubenswrapper[4822]: I1201 07:11:11.970072 4822 scope.go:117] "RemoveContainer" containerID="526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5" Dec 01 07:11:11 crc kubenswrapper[4822]: I1201 07:11:11.970804 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5"} err="failed to get container status \"526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5\": rpc error: code = NotFound desc = could not find container \"526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5\": container with ID starting with 526769c5a98857ab2d85485775cb8ee137bca012bb89d2da22632bd8580a5dd5 not found: ID does not exist" Dec 01 07:11:11 crc kubenswrapper[4822]: I1201 07:11:11.970830 4822 scope.go:117] "RemoveContainer" containerID="75bccd05fdf56db9e946daf05d599a452eb287174556653e7d9bb6d54491f25b" Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.081334 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f677dd449-mnlmw"] Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.117383 4822 scope.go:117] "RemoveContainer" containerID="87b9f05058a99e738bfa21d7abf037109225480fd440e2e419d20c8faa67000a" Dec 01 07:11:12 crc kubenswrapper[4822]: W1201 07:11:12.151785 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1b1dfd0_f21e_4d4a_8646_0ba6b168e24a.slice/crio-361ee547f2583050c1858170ea07374aebe48411d4f632dd18f2225388d6c223 WatchSource:0}: Error finding container 361ee547f2583050c1858170ea07374aebe48411d4f632dd18f2225388d6c223: Status 404 returned error can't find the container with id 361ee547f2583050c1858170ea07374aebe48411d4f632dd18f2225388d6c223 Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.472767 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-64649776b-r955c"] Dec 01 07:11:12 crc kubenswrapper[4822]: W1201 07:11:12.476893 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod082bea40_1a34_4711_8c74_dbf325eb5658.slice/crio-2bed2e52b55108ed9d3499e9e71838fd0b60c87cd3c1b8ef8610a8da02e1029d WatchSource:0}: Error finding container 2bed2e52b55108ed9d3499e9e71838fd0b60c87cd3c1b8ef8610a8da02e1029d: Status 404 returned error can't find the container with id 2bed2e52b55108ed9d3499e9e71838fd0b60c87cd3c1b8ef8610a8da02e1029d Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.489212 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-t6rkn" event={"ID":"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77","Type":"ContainerStarted","Data":"a96f54c06ad4ed4cadbe27b287eb8cc8b5491ac4590fd3bfa726e20944e4f2fd"} Dec 01 07:11:12 crc 
Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.489260 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-t6rkn" event={"ID":"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77","Type":"ContainerStarted","Data":"dcf859362264e8e9d5b8d16a396d58a5c38bccf0acf48fcc630839a3b57cb3aa"}
Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.495013 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162","Type":"ContainerStarted","Data":"a41bde461beb827ce50b7240b0b6439c5b96badbdb4ac27de199c04c17f3e7ba"}
Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.503079 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" event={"ID":"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a","Type":"ContainerStarted","Data":"361ee547f2583050c1858170ea07374aebe48411d4f632dd18f2225388d6c223"}
Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.511922 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c23d1c4f-c3eb-46de-a125-9aa740adf6eb","Type":"ContainerStarted","Data":"e954df652cdfdb5927013a07862800dfcdc0b96262457cbdb8940f665bd1618a"}
Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.516071 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xppc9" event={"ID":"fb9401cf-62a5-407e-8ac0-88d0eecc830d","Type":"ContainerStarted","Data":"cddecd865f20481a440a7f35a1049390c9555274514b9c1774c27cfcf78dfe1b"}
Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.519070 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"43dea7cf-bd0b-475b-94ec-12a1a141b803","Type":"ContainerStarted","Data":"7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605"}
Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.519791 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-t6rkn" podStartSLOduration=16.519776251 podStartE2EDuration="16.519776251s" podCreationTimestamp="2025-12-01 07:10:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:12.511147489 +0000 UTC m=+1227.831955175" watchObservedRunningTime="2025-12-01 07:11:12.519776251 +0000 UTC m=+1227.840583937"
Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.567930 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-xppc9" podStartSLOduration=3.519208954 podStartE2EDuration="27.567911773s" podCreationTimestamp="2025-12-01 07:10:45 +0000 UTC" firstStartedPulling="2025-12-01 07:10:46.731476665 +0000 UTC m=+1202.052284351" lastFinishedPulling="2025-12-01 07:11:10.780179484 +0000 UTC m=+1226.100987170" observedRunningTime="2025-12-01 07:11:12.561160864 +0000 UTC m=+1227.881968550" watchObservedRunningTime="2025-12-01 07:11:12.567911773 +0000 UTC m=+1227.888719459"
Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.948193 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-54446c57b5-gw6bc"]
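In the two pod_startup_latency_tracker entries above, podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally excludes time spent pulling images (lastFinishedPulling minus firstStartedPulling). A quick check against the placement-db-sync-xppc9 numbers, with timestamps copied from the entry and truncated to microseconds since Python's datetime has no nanosecond precision:

    from datetime import datetime

    def ts(s):
        return datetime.strptime(s, "%Y-%m-%d %H:%M:%S.%f")

    e2e  = ts("2025-12-01 07:11:12.567911") - ts("2025-12-01 07:10:45.000000")
    pull = ts("2025-12-01 07:11:10.780179") - ts("2025-12-01 07:10:46.731476")
    print(e2e - pull)   # 0:00:03.519208, matching podStartSLOduration=3.519208954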
Need to start a new one" pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.952574 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.952745 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 01 07:11:12 crc kubenswrapper[4822]: I1201 07:11:12.977312 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-54446c57b5-gw6bc"] Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.050954 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-combined-ca-bundle\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.050995 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-ovndb-tls-certs\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.051053 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-internal-tls-certs\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.051081 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-httpd-config\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.051125 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85r5r\" (UniqueName: \"kubernetes.io/projected/4e7558a6-6804-48af-b74d-394b7c5dd57e-kube-api-access-85r5r\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.051174 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-public-tls-certs\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.051192 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-config\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.152135 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-public-tls-certs\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.152179 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-config\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.152227 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-combined-ca-bundle\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.152250 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-ovndb-tls-certs\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.152293 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-internal-tls-certs\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.152318 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-httpd-config\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.152362 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85r5r\" (UniqueName: \"kubernetes.io/projected/4e7558a6-6804-48af-b74d-394b7c5dd57e-kube-api-access-85r5r\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.160373 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-httpd-config\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.168880 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-public-tls-certs\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.169439 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-ovndb-tls-certs\") pod \"neutron-54446c57b5-gw6bc\" (UID: 
\"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.173041 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-config\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.179742 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-combined-ca-bundle\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.182096 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85r5r\" (UniqueName: \"kubernetes.io/projected/4e7558a6-6804-48af-b74d-394b7c5dd57e-kube-api-access-85r5r\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.203936 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-internal-tls-certs\") pod \"neutron-54446c57b5-gw6bc\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.301453 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.643438 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"43dea7cf-bd0b-475b-94ec-12a1a141b803","Type":"ContainerStarted","Data":"6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0"} Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.644046 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="43dea7cf-bd0b-475b-94ec-12a1a141b803" containerName="glance-log" containerID="cri-o://7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605" gracePeriod=30 Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.644693 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="43dea7cf-bd0b-475b-94ec-12a1a141b803" containerName="glance-httpd" containerID="cri-o://6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0" gracePeriod=30 Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.656285 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64649776b-r955c" event={"ID":"082bea40-1a34-4711-8c74-dbf325eb5658","Type":"ContainerStarted","Data":"6ffe94d73e5f581649887769ef4860b7e9405ae002148fd3a8016e28d98b728c"} Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.656328 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64649776b-r955c" event={"ID":"082bea40-1a34-4711-8c74-dbf325eb5658","Type":"ContainerStarted","Data":"5b2b1e1cb647203da9ad9d042fde915571a9f033d00ba41481a665be92106ea1"} Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.656341 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-64649776b-r955c" event={"ID":"082bea40-1a34-4711-8c74-dbf325eb5658","Type":"ContainerStarted","Data":"2bed2e52b55108ed9d3499e9e71838fd0b60c87cd3c1b8ef8610a8da02e1029d"} Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.656357 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.663141 4822 generic.go:334] "Generic (PLEG): container finished" podID="a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" containerID="90696d8805c61145f07681a10dc909b6d868a96efeec4eb77b7e907f6b2d2f6b" exitCode=0 Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.663236 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" event={"ID":"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a","Type":"ContainerDied","Data":"90696d8805c61145f07681a10dc909b6d868a96efeec4eb77b7e907f6b2d2f6b"} Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.686591 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c23d1c4f-c3eb-46de-a125-9aa740adf6eb","Type":"ContainerStarted","Data":"1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc"} Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.688801 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=22.68878755 podStartE2EDuration="22.68878755s" podCreationTimestamp="2025-12-01 07:10:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:13.686903068 +0000 UTC m=+1229.007710754" watchObservedRunningTime="2025-12-01 07:11:13.68878755 +0000 UTC m=+1229.009595236" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.751009 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-64649776b-r955c" podStartSLOduration=3.750991268 podStartE2EDuration="3.750991268s" podCreationTimestamp="2025-12-01 07:11:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:13.7147497 +0000 UTC m=+1229.035557386" watchObservedRunningTime="2025-12-01 07:11:13.750991268 +0000 UTC m=+1229.071798954" Dec 01 07:11:13 crc kubenswrapper[4822]: I1201 07:11:13.837592 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f6d79597f-nhz8d" podUID="de738eee-1f47-42ae-be49-a65d2fe8ea3e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: i/o timeout" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.112041 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-54446c57b5-gw6bc"] Dec 01 07:11:14 crc kubenswrapper[4822]: W1201 07:11:14.143780 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e7558a6_6804_48af_b74d_394b7c5dd57e.slice/crio-401ddc270177763ff4738a35dce8db7e419fab267f7c8ebd9abf6b81b9a48ddd WatchSource:0}: Error finding container 401ddc270177763ff4738a35dce8db7e419fab267f7c8ebd9abf6b81b9a48ddd: Status 404 returned error can't find the container with id 401ddc270177763ff4738a35dce8db7e419fab267f7c8ebd9abf6b81b9a48ddd Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.454714 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.587006 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-config-data\") pod \"43dea7cf-bd0b-475b-94ec-12a1a141b803\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.587196 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43dea7cf-bd0b-475b-94ec-12a1a141b803-logs\") pod \"43dea7cf-bd0b-475b-94ec-12a1a141b803\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.587238 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"43dea7cf-bd0b-475b-94ec-12a1a141b803\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.587284 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43dea7cf-bd0b-475b-94ec-12a1a141b803-httpd-run\") pod \"43dea7cf-bd0b-475b-94ec-12a1a141b803\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.587410 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r9kd\" (UniqueName: \"kubernetes.io/projected/43dea7cf-bd0b-475b-94ec-12a1a141b803-kube-api-access-4r9kd\") pod \"43dea7cf-bd0b-475b-94ec-12a1a141b803\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.587456 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-scripts\") pod \"43dea7cf-bd0b-475b-94ec-12a1a141b803\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.587487 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-combined-ca-bundle\") pod \"43dea7cf-bd0b-475b-94ec-12a1a141b803\" (UID: \"43dea7cf-bd0b-475b-94ec-12a1a141b803\") " Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.588369 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43dea7cf-bd0b-475b-94ec-12a1a141b803-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "43dea7cf-bd0b-475b-94ec-12a1a141b803" (UID: "43dea7cf-bd0b-475b-94ec-12a1a141b803"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.588792 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43dea7cf-bd0b-475b-94ec-12a1a141b803-logs" (OuterVolumeSpecName: "logs") pod "43dea7cf-bd0b-475b-94ec-12a1a141b803" (UID: "43dea7cf-bd0b-475b-94ec-12a1a141b803"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.594882 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "43dea7cf-bd0b-475b-94ec-12a1a141b803" (UID: "43dea7cf-bd0b-475b-94ec-12a1a141b803"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.595794 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-scripts" (OuterVolumeSpecName: "scripts") pod "43dea7cf-bd0b-475b-94ec-12a1a141b803" (UID: "43dea7cf-bd0b-475b-94ec-12a1a141b803"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.596287 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43dea7cf-bd0b-475b-94ec-12a1a141b803-kube-api-access-4r9kd" (OuterVolumeSpecName: "kube-api-access-4r9kd") pod "43dea7cf-bd0b-475b-94ec-12a1a141b803" (UID: "43dea7cf-bd0b-475b-94ec-12a1a141b803"). InnerVolumeSpecName "kube-api-access-4r9kd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.637681 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43dea7cf-bd0b-475b-94ec-12a1a141b803" (UID: "43dea7cf-bd0b-475b-94ec-12a1a141b803"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.656646 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-config-data" (OuterVolumeSpecName: "config-data") pod "43dea7cf-bd0b-475b-94ec-12a1a141b803" (UID: "43dea7cf-bd0b-475b-94ec-12a1a141b803"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.690041 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r9kd\" (UniqueName: \"kubernetes.io/projected/43dea7cf-bd0b-475b-94ec-12a1a141b803-kube-api-access-4r9kd\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.690070 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.690080 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.690089 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43dea7cf-bd0b-475b-94ec-12a1a141b803-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.690099 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43dea7cf-bd0b-475b-94ec-12a1a141b803-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.690121 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.690130 4822 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43dea7cf-bd0b-475b-94ec-12a1a141b803-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.697312 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" event={"ID":"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a","Type":"ContainerStarted","Data":"6a6f8600e82ab47468d8adcd7d669f007b4eab85a3625dca88b611e3c0d5e275"} Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.698894 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.701225 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c23d1c4f-c3eb-46de-a125-9aa740adf6eb","Type":"ContainerStarted","Data":"96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2"} Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.701335 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c23d1c4f-c3eb-46de-a125-9aa740adf6eb" containerName="glance-log" containerID="cri-o://1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc" gracePeriod=30 Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.701560 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c23d1c4f-c3eb-46de-a125-9aa740adf6eb" containerName="glance-httpd" containerID="cri-o://96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2" gracePeriod=30 Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.704705 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-54446c57b5-gw6bc" event={"ID":"4e7558a6-6804-48af-b74d-394b7c5dd57e","Type":"ContainerStarted","Data":"401ddc270177763ff4738a35dce8db7e419fab267f7c8ebd9abf6b81b9a48ddd"} Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.707198 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.707343 4822 generic.go:334] "Generic (PLEG): container finished" podID="43dea7cf-bd0b-475b-94ec-12a1a141b803" containerID="6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0" exitCode=0 Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.707376 4822 generic.go:334] "Generic (PLEG): container finished" podID="43dea7cf-bd0b-475b-94ec-12a1a141b803" containerID="7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605" exitCode=143 Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.707387 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.707414 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"43dea7cf-bd0b-475b-94ec-12a1a141b803","Type":"ContainerDied","Data":"6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0"} Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.707450 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"43dea7cf-bd0b-475b-94ec-12a1a141b803","Type":"ContainerDied","Data":"7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605"} Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.707461 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"43dea7cf-bd0b-475b-94ec-12a1a141b803","Type":"ContainerDied","Data":"4195fa6256a0d69dc1d1132965a8c64f7e855569004f9aa81e4e25a96e0516af"} Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.707478 4822 scope.go:117] "RemoveContainer" containerID="6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.735528 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" podStartSLOduration=4.7355048140000005 podStartE2EDuration="4.735504814s" podCreationTimestamp="2025-12-01 07:11:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:14.729350552 +0000 UTC m=+1230.050158238" watchObservedRunningTime="2025-12-01 07:11:14.735504814 +0000 UTC m=+1230.056312510" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.763861 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=23.763843971 podStartE2EDuration="23.763843971s" podCreationTimestamp="2025-12-01 07:10:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:14.758188172 +0000 UTC m=+1230.078995888" watchObservedRunningTime="2025-12-01 07:11:14.763843971 +0000 UTC m=+1230.084651657" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.783795 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/glance-default-external-api-0"] Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.792048 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.797564 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.851838 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:11:14 crc kubenswrapper[4822]: E1201 07:11:14.852241 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dea7cf-bd0b-475b-94ec-12a1a141b803" containerName="glance-log" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.852258 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dea7cf-bd0b-475b-94ec-12a1a141b803" containerName="glance-log" Dec 01 07:11:14 crc kubenswrapper[4822]: E1201 07:11:14.852293 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dea7cf-bd0b-475b-94ec-12a1a141b803" containerName="glance-httpd" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.852300 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dea7cf-bd0b-475b-94ec-12a1a141b803" containerName="glance-httpd" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.852465 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="43dea7cf-bd0b-475b-94ec-12a1a141b803" containerName="glance-httpd" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.852484 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="43dea7cf-bd0b-475b-94ec-12a1a141b803" containerName="glance-log" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.853440 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.856885 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.870982 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.872270 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.948754 4822 scope.go:117] "RemoveContainer" containerID="7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.967414 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43dea7cf-bd0b-475b-94ec-12a1a141b803" path="/var/lib/kubelet/pods/43dea7cf-bd0b-475b-94ec-12a1a141b803/volumes" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.994985 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/911beba1-8e38-4051-a88c-71e0a9ac20a4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.995263 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.995384 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.995584 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-config-data\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.995697 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-scripts\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.995797 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv2wj\" (UniqueName: \"kubernetes.io/projected/911beba1-8e38-4051-a88c-71e0a9ac20a4-kube-api-access-vv2wj\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.995888 4822 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/911beba1-8e38-4051-a88c-71e0a9ac20a4-logs\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:14 crc kubenswrapper[4822]: I1201 07:11:14.995979 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.097922 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-config-data\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.097968 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-scripts\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.098001 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv2wj\" (UniqueName: \"kubernetes.io/projected/911beba1-8e38-4051-a88c-71e0a9ac20a4-kube-api-access-vv2wj\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.098024 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/911beba1-8e38-4051-a88c-71e0a9ac20a4-logs\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.098045 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.098082 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/911beba1-8e38-4051-a88c-71e0a9ac20a4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.098114 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.098159 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.098988 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.099125 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/911beba1-8e38-4051-a88c-71e0a9ac20a4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.099763 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/911beba1-8e38-4051-a88c-71e0a9ac20a4-logs\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.114429 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-scripts\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.116932 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv2wj\" (UniqueName: \"kubernetes.io/projected/911beba1-8e38-4051-a88c-71e0a9ac20a4-kube-api-access-vv2wj\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.118004 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-config-data\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.120613 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.122478 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.133098 4822 scope.go:117] "RemoveContainer" containerID="6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0" Dec 01 07:11:15 crc kubenswrapper[4822]: E1201 07:11:15.134669 4822 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0\": container with ID starting with 6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0 not found: ID does not exist" containerID="6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.134703 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0"} err="failed to get container status \"6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0\": rpc error: code = NotFound desc = could not find container \"6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0\": container with ID starting with 6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0 not found: ID does not exist" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.134723 4822 scope.go:117] "RemoveContainer" containerID="7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605" Dec 01 07:11:15 crc kubenswrapper[4822]: E1201 07:11:15.135085 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605\": container with ID starting with 7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605 not found: ID does not exist" containerID="7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.135135 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605"} err="failed to get container status \"7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605\": rpc error: code = NotFound desc = could not find container \"7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605\": container with ID starting with 7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605 not found: ID does not exist" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.135151 4822 scope.go:117] "RemoveContainer" containerID="6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.135447 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0"} err="failed to get container status \"6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0\": rpc error: code = NotFound desc = could not find container \"6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0\": container with ID starting with 6d66401cdd6d8e46f6b4c13f1d72b40630f45b05f20e139df807627ad732f4e0 not found: ID does not exist" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.135466 4822 scope.go:117] "RemoveContainer" containerID="7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.138726 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605"} err="failed to get container status \"7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605\": rpc error: code = NotFound desc = could not find container 
\"7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605\": container with ID starting with 7dae96b43c95053edd6a9687c6278ec430149015d7242c84070d09f83b67f605 not found: ID does not exist" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.153650 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.175915 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.662304 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.716723 4822 generic.go:334] "Generic (PLEG): container finished" podID="c23d1c4f-c3eb-46de-a125-9aa740adf6eb" containerID="96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2" exitCode=0 Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.716751 4822 generic.go:334] "Generic (PLEG): container finished" podID="c23d1c4f-c3eb-46de-a125-9aa740adf6eb" containerID="1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc" exitCode=143 Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.716784 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c23d1c4f-c3eb-46de-a125-9aa740adf6eb","Type":"ContainerDied","Data":"96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2"} Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.716810 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c23d1c4f-c3eb-46de-a125-9aa740adf6eb","Type":"ContainerDied","Data":"1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc"} Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.716822 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c23d1c4f-c3eb-46de-a125-9aa740adf6eb","Type":"ContainerDied","Data":"e954df652cdfdb5927013a07862800dfcdc0b96262457cbdb8940f665bd1618a"} Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.716836 4822 scope.go:117] "RemoveContainer" containerID="96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.716902 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.733973 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54446c57b5-gw6bc" event={"ID":"4e7558a6-6804-48af-b74d-394b7c5dd57e","Type":"ContainerStarted","Data":"995ca5fdebaa312592d184b7f0d85a781106462fc236fe76b19b5c65128c9922"} Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.734023 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54446c57b5-gw6bc" event={"ID":"4e7558a6-6804-48af-b74d-394b7c5dd57e","Type":"ContainerStarted","Data":"819d59f612f038c2b40fbd5bd6e0ae52aec5cfa454b47bb1de884f3a09c04531"} Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.735765 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.741247 4822 generic.go:334] "Generic (PLEG): container finished" podID="fb9401cf-62a5-407e-8ac0-88d0eecc830d" containerID="cddecd865f20481a440a7f35a1049390c9555274514b9c1774c27cfcf78dfe1b" exitCode=0 Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.741324 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xppc9" event={"ID":"fb9401cf-62a5-407e-8ac0-88d0eecc830d","Type":"ContainerDied","Data":"cddecd865f20481a440a7f35a1049390c9555274514b9c1774c27cfcf78dfe1b"} Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.756072 4822 scope.go:117] "RemoveContainer" containerID="1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.772366 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-54446c57b5-gw6bc" podStartSLOduration=3.77234354 podStartE2EDuration="3.77234354s" podCreationTimestamp="2025-12-01 07:11:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:15.758999305 +0000 UTC m=+1231.079806991" watchObservedRunningTime="2025-12-01 07:11:15.77234354 +0000 UTC m=+1231.093151246" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.790562 4822 scope.go:117] "RemoveContainer" containerID="96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2" Dec 01 07:11:15 crc kubenswrapper[4822]: E1201 07:11:15.791426 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2\": container with ID starting with 96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2 not found: ID does not exist" containerID="96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.791467 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2"} err="failed to get container status \"96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2\": rpc error: code = NotFound desc = could not find container \"96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2\": container with ID starting with 96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2 not found: ID does not exist" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.791496 4822 scope.go:117] "RemoveContainer" 
containerID="1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc" Dec 01 07:11:15 crc kubenswrapper[4822]: E1201 07:11:15.791785 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc\": container with ID starting with 1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc not found: ID does not exist" containerID="1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.791813 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc"} err="failed to get container status \"1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc\": rpc error: code = NotFound desc = could not find container \"1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc\": container with ID starting with 1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc not found: ID does not exist" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.791829 4822 scope.go:117] "RemoveContainer" containerID="96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.792468 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2"} err="failed to get container status \"96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2\": rpc error: code = NotFound desc = could not find container \"96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2\": container with ID starting with 96b11349d3b8552b097cdf1f4fdd2abfda66c168d5c7ec50041bf8ebca6a86c2 not found: ID does not exist" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.792526 4822 scope.go:117] "RemoveContainer" containerID="1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.806943 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc"} err="failed to get container status \"1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc\": rpc error: code = NotFound desc = could not find container \"1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc\": container with ID starting with 1293c67131f5c77fd25da5737e98c9b66d284322827f9d9b08321005a231b7bc not found: ID does not exist" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.815301 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-combined-ca-bundle\") pod \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.816915 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-config-data\") pod \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.816996 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-scripts\") pod \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.817118 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-httpd-run\") pod \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.817183 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.817271 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-logs\") pod \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.817325 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfj42\" (UniqueName: \"kubernetes.io/projected/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-kube-api-access-qfj42\") pod \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\" (UID: \"c23d1c4f-c3eb-46de-a125-9aa740adf6eb\") " Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.818921 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c23d1c4f-c3eb-46de-a125-9aa740adf6eb" (UID: "c23d1c4f-c3eb-46de-a125-9aa740adf6eb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.823961 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "c23d1c4f-c3eb-46de-a125-9aa740adf6eb" (UID: "c23d1c4f-c3eb-46de-a125-9aa740adf6eb"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.824275 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-logs" (OuterVolumeSpecName: "logs") pod "c23d1c4f-c3eb-46de-a125-9aa740adf6eb" (UID: "c23d1c4f-c3eb-46de-a125-9aa740adf6eb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.824840 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-scripts" (OuterVolumeSpecName: "scripts") pod "c23d1c4f-c3eb-46de-a125-9aa740adf6eb" (UID: "c23d1c4f-c3eb-46de-a125-9aa740adf6eb"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.828405 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-kube-api-access-qfj42" (OuterVolumeSpecName: "kube-api-access-qfj42") pod "c23d1c4f-c3eb-46de-a125-9aa740adf6eb" (UID: "c23d1c4f-c3eb-46de-a125-9aa740adf6eb"). InnerVolumeSpecName "kube-api-access-qfj42". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.877334 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c23d1c4f-c3eb-46de-a125-9aa740adf6eb" (UID: "c23d1c4f-c3eb-46de-a125-9aa740adf6eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.885836 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-config-data" (OuterVolumeSpecName: "config-data") pod "c23d1c4f-c3eb-46de-a125-9aa740adf6eb" (UID: "c23d1c4f-c3eb-46de-a125-9aa740adf6eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.922016 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.922082 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfj42\" (UniqueName: \"kubernetes.io/projected/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-kube-api-access-qfj42\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.922119 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.922132 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.922144 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.922153 4822 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c23d1c4f-c3eb-46de-a125-9aa740adf6eb-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.922213 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.933954 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:11:15 crc kubenswrapper[4822]: I1201 07:11:15.945173 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: 
"kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 01 07:11:15 crc kubenswrapper[4822]: W1201 07:11:15.945725 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod911beba1_8e38_4051_a88c_71e0a9ac20a4.slice/crio-6a74119847719bd60205b8a506667b4cbed56c5327b2a0fd5518c94927d3ed88 WatchSource:0}: Error finding container 6a74119847719bd60205b8a506667b4cbed56c5327b2a0fd5518c94927d3ed88: Status 404 returned error can't find the container with id 6a74119847719bd60205b8a506667b4cbed56c5327b2a0fd5518c94927d3ed88 Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.030740 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.081806 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.098669 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.104987 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:11:16 crc kubenswrapper[4822]: E1201 07:11:16.105318 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c23d1c4f-c3eb-46de-a125-9aa740adf6eb" containerName="glance-httpd" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.105331 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c23d1c4f-c3eb-46de-a125-9aa740adf6eb" containerName="glance-httpd" Dec 01 07:11:16 crc kubenswrapper[4822]: E1201 07:11:16.105346 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c23d1c4f-c3eb-46de-a125-9aa740adf6eb" containerName="glance-log" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.105354 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c23d1c4f-c3eb-46de-a125-9aa740adf6eb" containerName="glance-log" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.105514 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="c23d1c4f-c3eb-46de-a125-9aa740adf6eb" containerName="glance-log" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.105527 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="c23d1c4f-c3eb-46de-a125-9aa740adf6eb" containerName="glance-httpd" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.106378 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.110284 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.110456 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.111875 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.233464 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8z6l\" (UniqueName: \"kubernetes.io/projected/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-kube-api-access-w8z6l\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.233513 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.233536 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.233615 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.233635 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-logs\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.233671 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.233695 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.233722 4822 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.335441 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.336137 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.336210 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.336230 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-logs\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.336273 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.336297 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.336328 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.336369 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8z6l\" (UniqueName: \"kubernetes.io/projected/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-kube-api-access-w8z6l\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.336928 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.336091 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.337439 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-logs\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.341111 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.343589 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.344184 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.350208 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.353370 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8z6l\" (UniqueName: \"kubernetes.io/projected/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-kube-api-access-w8z6l\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.389727 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.439353 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.757936 4822 generic.go:334] "Generic (PLEG): container finished" podID="8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77" containerID="a96f54c06ad4ed4cadbe27b287eb8cc8b5491ac4590fd3bfa726e20944e4f2fd" exitCode=0 Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.757989 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-t6rkn" event={"ID":"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77","Type":"ContainerDied","Data":"a96f54c06ad4ed4cadbe27b287eb8cc8b5491ac4590fd3bfa726e20944e4f2fd"} Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.764565 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"911beba1-8e38-4051-a88c-71e0a9ac20a4","Type":"ContainerStarted","Data":"e016370e4b35515437781c55ea2fbcaf57b7c98d9107e21eb86fc167091c0bed"} Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.764611 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"911beba1-8e38-4051-a88c-71e0a9ac20a4","Type":"ContainerStarted","Data":"6a74119847719bd60205b8a506667b4cbed56c5327b2a0fd5518c94927d3ed88"} Dec 01 07:11:16 crc kubenswrapper[4822]: I1201 07:11:16.965084 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c23d1c4f-c3eb-46de-a125-9aa740adf6eb" path="/var/lib/kubelet/pods/c23d1c4f-c3eb-46de-a125-9aa740adf6eb/volumes" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.405733 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.491926 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f645789c-s26qn"] Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.492158 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f645789c-s26qn" podUID="d28f5670-3f16-470f-8a83-a1dcaca7bce4" containerName="dnsmasq-dns" containerID="cri-o://c4aec2584ebe7a419b01daa88d824e0c5e5215037d3af31ab07615090bcb2dae" gracePeriod=10 Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.668202 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xppc9" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.702733 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.833222 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-credential-keys\") pod \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.833647 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb9401cf-62a5-407e-8ac0-88d0eecc830d-logs\") pod \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.833743 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75dvt\" (UniqueName: \"kubernetes.io/projected/fb9401cf-62a5-407e-8ac0-88d0eecc830d-kube-api-access-75dvt\") pod \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.833765 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-scripts\") pod \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.833807 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-fernet-keys\") pod \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.833872 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-combined-ca-bundle\") pod \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.833936 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bh5cc\" (UniqueName: \"kubernetes.io/projected/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-kube-api-access-bh5cc\") pod \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.833986 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-combined-ca-bundle\") pod \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.834032 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-config-data\") pod \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\" (UID: \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.834071 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-scripts\") pod \"fb9401cf-62a5-407e-8ac0-88d0eecc830d\" (UID: 
\"fb9401cf-62a5-407e-8ac0-88d0eecc830d\") " Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.834092 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-config-data\") pod \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.834122 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb9401cf-62a5-407e-8ac0-88d0eecc830d-logs" (OuterVolumeSpecName: "logs") pod "fb9401cf-62a5-407e-8ac0-88d0eecc830d" (UID: "fb9401cf-62a5-407e-8ac0-88d0eecc830d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.834506 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb9401cf-62a5-407e-8ac0-88d0eecc830d-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.859868 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-scripts" (OuterVolumeSpecName: "scripts") pod "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77" (UID: "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.859954 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77" (UID: "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.860186 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xppc9" event={"ID":"fb9401cf-62a5-407e-8ac0-88d0eecc830d","Type":"ContainerDied","Data":"c3ef05bdb917bb04c191537d0836844b2c9dedacb5a630ea15dda06d5e73bb6b"} Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.860219 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3ef05bdb917bb04c191537d0836844b2c9dedacb5a630ea15dda06d5e73bb6b" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.860289 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xppc9" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.862696 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb9401cf-62a5-407e-8ac0-88d0eecc830d-kube-api-access-75dvt" (OuterVolumeSpecName: "kube-api-access-75dvt") pod "fb9401cf-62a5-407e-8ac0-88d0eecc830d" (UID: "fb9401cf-62a5-407e-8ac0-88d0eecc830d"). InnerVolumeSpecName "kube-api-access-75dvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.867056 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77" (UID: "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.872867 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-kube-api-access-bh5cc" (OuterVolumeSpecName: "kube-api-access-bh5cc") pod "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77" (UID: "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77"). InnerVolumeSpecName "kube-api-access-bh5cc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.872909 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-t6rkn" event={"ID":"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77","Type":"ContainerDied","Data":"dcf859362264e8e9d5b8d16a396d58a5c38bccf0acf48fcc630839a3b57cb3aa"} Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.872893 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-t6rkn" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.872941 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcf859362264e8e9d5b8d16a396d58a5c38bccf0acf48fcc630839a3b57cb3aa" Dec 01 07:11:20 crc kubenswrapper[4822]: E1201 07:11:20.900039 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-config-data podName:8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77 nodeName:}" failed. No retries permitted until 2025-12-01 07:11:21.400000089 +0000 UTC m=+1236.720807775 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-config-data") pod "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77" (UID: "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77") : error deleting /var/lib/kubelet/pods/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77/volume-subpaths: remove /var/lib/kubelet/pods/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77/volume-subpaths: no such file or directory Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.901230 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-scripts" (OuterVolumeSpecName: "scripts") pod "fb9401cf-62a5-407e-8ac0-88d0eecc830d" (UID: "fb9401cf-62a5-407e-8ac0-88d0eecc830d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.904353 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77" (UID: "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.911702 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-config-data" (OuterVolumeSpecName: "config-data") pod "fb9401cf-62a5-407e-8ac0-88d0eecc830d" (UID: "fb9401cf-62a5-407e-8ac0-88d0eecc830d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.926861 4822 generic.go:334] "Generic (PLEG): container finished" podID="d28f5670-3f16-470f-8a83-a1dcaca7bce4" containerID="c4aec2584ebe7a419b01daa88d824e0c5e5215037d3af31ab07615090bcb2dae" exitCode=0 Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.926906 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f645789c-s26qn" event={"ID":"d28f5670-3f16-470f-8a83-a1dcaca7bce4","Type":"ContainerDied","Data":"c4aec2584ebe7a419b01daa88d824e0c5e5215037d3af31ab07615090bcb2dae"} Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.931088 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb9401cf-62a5-407e-8ac0-88d0eecc830d" (UID: "fb9401cf-62a5-407e-8ac0-88d0eecc830d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.938206 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bh5cc\" (UniqueName: \"kubernetes.io/projected/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-kube-api-access-bh5cc\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.938236 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.938247 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.938259 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9401cf-62a5-407e-8ac0-88d0eecc830d-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.938271 4822 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.938281 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75dvt\" (UniqueName: \"kubernetes.io/projected/fb9401cf-62a5-407e-8ac0-88d0eecc830d-kube-api-access-75dvt\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.938292 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.938301 4822 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:20 crc kubenswrapper[4822]: I1201 07:11:20.938311 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.125538 4822 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.251351 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-dns-svc\") pod \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.251435 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-nb\") pod \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.251457 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-config\") pod \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.251649 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lq87n\" (UniqueName: \"kubernetes.io/projected/d28f5670-3f16-470f-8a83-a1dcaca7bce4-kube-api-access-lq87n\") pod \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.251700 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-dns-swift-storage-0\") pod \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.251717 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-sb\") pod \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.266138 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d28f5670-3f16-470f-8a83-a1dcaca7bce4-kube-api-access-lq87n" (OuterVolumeSpecName: "kube-api-access-lq87n") pod "d28f5670-3f16-470f-8a83-a1dcaca7bce4" (UID: "d28f5670-3f16-470f-8a83-a1dcaca7bce4"). InnerVolumeSpecName "kube-api-access-lq87n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.342904 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d28f5670-3f16-470f-8a83-a1dcaca7bce4" (UID: "d28f5670-3f16-470f-8a83-a1dcaca7bce4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.352959 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d28f5670-3f16-470f-8a83-a1dcaca7bce4" (UID: "d28f5670-3f16-470f-8a83-a1dcaca7bce4"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.353159 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-sb\") pod \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\" (UID: \"d28f5670-3f16-470f-8a83-a1dcaca7bce4\") " Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.353637 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.353651 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lq87n\" (UniqueName: \"kubernetes.io/projected/d28f5670-3f16-470f-8a83-a1dcaca7bce4-kube-api-access-lq87n\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:21 crc kubenswrapper[4822]: W1201 07:11:21.353725 4822 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/d28f5670-3f16-470f-8a83-a1dcaca7bce4/volumes/kubernetes.io~configmap/ovsdbserver-sb Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.353736 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d28f5670-3f16-470f-8a83-a1dcaca7bce4" (UID: "d28f5670-3f16-470f-8a83-a1dcaca7bce4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.354339 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-config" (OuterVolumeSpecName: "config") pod "d28f5670-3f16-470f-8a83-a1dcaca7bce4" (UID: "d28f5670-3f16-470f-8a83-a1dcaca7bce4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.356326 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d28f5670-3f16-470f-8a83-a1dcaca7bce4" (UID: "d28f5670-3f16-470f-8a83-a1dcaca7bce4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.365037 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d28f5670-3f16-470f-8a83-a1dcaca7bce4" (UID: "d28f5670-3f16-470f-8a83-a1dcaca7bce4"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.454732 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-config-data\") pod \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\" (UID: \"8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77\") " Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.455305 4822 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.455318 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.455326 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.455335 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d28f5670-3f16-470f-8a83-a1dcaca7bce4-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.459334 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-config-data" (OuterVolumeSpecName: "config-data") pod "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77" (UID: "8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.466106 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.556921 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.792169 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-55d788fdd6-vxlcs"] Dec 01 07:11:21 crc kubenswrapper[4822]: E1201 07:11:21.792769 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d28f5670-3f16-470f-8a83-a1dcaca7bce4" containerName="init" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.792799 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d28f5670-3f16-470f-8a83-a1dcaca7bce4" containerName="init" Dec 01 07:11:21 crc kubenswrapper[4822]: E1201 07:11:21.792828 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d28f5670-3f16-470f-8a83-a1dcaca7bce4" containerName="dnsmasq-dns" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.792839 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d28f5670-3f16-470f-8a83-a1dcaca7bce4" containerName="dnsmasq-dns" Dec 01 07:11:21 crc kubenswrapper[4822]: E1201 07:11:21.792861 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77" containerName="keystone-bootstrap" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.792871 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77" containerName="keystone-bootstrap" Dec 01 07:11:21 crc kubenswrapper[4822]: E1201 07:11:21.792887 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb9401cf-62a5-407e-8ac0-88d0eecc830d" containerName="placement-db-sync" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.792895 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb9401cf-62a5-407e-8ac0-88d0eecc830d" containerName="placement-db-sync" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.793116 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77" containerName="keystone-bootstrap" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.793159 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb9401cf-62a5-407e-8ac0-88d0eecc830d" containerName="placement-db-sync" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.793179 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="d28f5670-3f16-470f-8a83-a1dcaca7bce4" containerName="dnsmasq-dns" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.794241 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.797910 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.798094 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.798412 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-qzrkz" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.798826 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.799809 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.817051 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-55d788fdd6-vxlcs"] Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.894352 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5875588964-pg9h2"] Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.896123 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.902819 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.903262 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.903388 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.903588 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-dcwzn" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.903639 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.903675 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.910068 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5875588964-pg9h2"] Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.962906 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f645789c-s26qn" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.962903 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f645789c-s26qn" event={"ID":"d28f5670-3f16-470f-8a83-a1dcaca7bce4","Type":"ContainerDied","Data":"2b2c8781222661d9dc65cc344c748402ef0d9d48147b9d234e56ad3e822876bf"} Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.963067 4822 scope.go:117] "RemoveContainer" containerID="c4aec2584ebe7a419b01daa88d824e0c5e5215037d3af31ab07615090bcb2dae" Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.978097 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"911beba1-8e38-4051-a88c-71e0a9ac20a4","Type":"ContainerStarted","Data":"c2450bf788f356d13f91c3cf9dd8d0ca0efcdb51deb175e0510e41c6dbedde4e"} Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.991836 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162","Type":"ContainerStarted","Data":"7b62bd2bd8314759884d809bb5811f389da2d5db64b0441cc04f1ccd4ca09830"} Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.993728 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-zndwr" event={"ID":"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0","Type":"ContainerStarted","Data":"70cf9d294666fb1c698936022b2f9f0fdb8b835395a4a34acdd4f7d54a3e99b9"} Dec 01 07:11:21 crc kubenswrapper[4822]: I1201 07:11:21.998703 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2","Type":"ContainerStarted","Data":"6416935c4a73cd31879fe7faee99a7a861fedc4d5f95d5624eee541d08176870"} Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.016851 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-public-tls-certs\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.016910 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/020e76da-9968-4212-a34c-c01c8f8979de-logs\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.016955 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvrsm\" (UniqueName: \"kubernetes.io/projected/020e76da-9968-4212-a34c-c01c8f8979de-kube-api-access-kvrsm\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.017026 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-scripts\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.017355 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-internal-tls-certs\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.017406 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-fernet-keys\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.017434 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-credential-keys\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.017484 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7w8lg\" (UniqueName: \"kubernetes.io/projected/b422e423-4e50-4e96-a341-d7bb5188c4af-kube-api-access-7w8lg\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.017715 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-scripts\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.017746 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-public-tls-certs\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.017820 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-config-data\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.017867 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-internal-tls-certs\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.017922 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-config-data\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.017962 4822 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-combined-ca-bundle\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.018016 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-combined-ca-bundle\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.021694 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.021666954 podStartE2EDuration="8.021666954s" podCreationTimestamp="2025-12-01 07:11:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:22.005647373 +0000 UTC m=+1237.326455079" watchObservedRunningTime="2025-12-01 07:11:22.021666954 +0000 UTC m=+1237.342474640" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.024296 4822 scope.go:117] "RemoveContainer" containerID="4a4012d67e9e9ab05aedc0b9fd3ce776d571a637172f17702a968378c4b88abf" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.040655 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-zndwr" podStartSLOduration=3.347157878 podStartE2EDuration="37.040635457s" podCreationTimestamp="2025-12-01 07:10:45 +0000 UTC" firstStartedPulling="2025-12-01 07:10:47.206797037 +0000 UTC m=+1202.527604723" lastFinishedPulling="2025-12-01 07:11:20.900274616 +0000 UTC m=+1236.221082302" observedRunningTime="2025-12-01 07:11:22.036543052 +0000 UTC m=+1237.357350748" watchObservedRunningTime="2025-12-01 07:11:22.040635457 +0000 UTC m=+1237.361443153" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.058617 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f645789c-s26qn"] Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.066957 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f645789c-s26qn"] Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.119842 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-scripts\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.119892 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-internal-tls-certs\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.119954 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-fernet-keys\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 
01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.119972 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-credential-keys\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.120023 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7w8lg\" (UniqueName: \"kubernetes.io/projected/b422e423-4e50-4e96-a341-d7bb5188c4af-kube-api-access-7w8lg\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.120056 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-scripts\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.120077 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-public-tls-certs\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.120094 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-config-data\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.120130 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-internal-tls-certs\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.120170 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-config-data\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.120198 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-combined-ca-bundle\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.120221 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-combined-ca-bundle\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.120252 4822 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-public-tls-certs\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.120268 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/020e76da-9968-4212-a34c-c01c8f8979de-logs\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.120300 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvrsm\" (UniqueName: \"kubernetes.io/projected/020e76da-9968-4212-a34c-c01c8f8979de-kube-api-access-kvrsm\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.125269 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/020e76da-9968-4212-a34c-c01c8f8979de-logs\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.126060 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-public-tls-certs\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.129797 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-combined-ca-bundle\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.131080 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-credential-keys\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.131331 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-config-data\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.131830 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-combined-ca-bundle\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.132165 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-public-tls-certs\") pod 
\"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.135285 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-internal-tls-certs\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.138298 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-config-data\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.139313 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-scripts\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.144751 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-internal-tls-certs\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.144831 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-scripts\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.149131 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7w8lg\" (UniqueName: \"kubernetes.io/projected/b422e423-4e50-4e96-a341-d7bb5188c4af-kube-api-access-7w8lg\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.149222 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvrsm\" (UniqueName: \"kubernetes.io/projected/020e76da-9968-4212-a34c-c01c8f8979de-kube-api-access-kvrsm\") pod \"placement-55d788fdd6-vxlcs\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.157162 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-fernet-keys\") pod \"keystone-5875588964-pg9h2\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.215379 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.413270 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.713940 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5875588964-pg9h2"] Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.917723 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-55d788fdd6-vxlcs"] Dec 01 07:11:22 crc kubenswrapper[4822]: W1201 07:11:22.918490 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod020e76da_9968_4212_a34c_c01c8f8979de.slice/crio-b09b42c38ff49cec176bd20610570cb47b55062ed4275969a301088136795b7b WatchSource:0}: Error finding container b09b42c38ff49cec176bd20610570cb47b55062ed4275969a301088136795b7b: Status 404 returned error can't find the container with id b09b42c38ff49cec176bd20610570cb47b55062ed4275969a301088136795b7b Dec 01 07:11:22 crc kubenswrapper[4822]: I1201 07:11:22.969967 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d28f5670-3f16-470f-8a83-a1dcaca7bce4" path="/var/lib/kubelet/pods/d28f5670-3f16-470f-8a83-a1dcaca7bce4/volumes" Dec 01 07:11:23 crc kubenswrapper[4822]: I1201 07:11:23.010603 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55d788fdd6-vxlcs" event={"ID":"020e76da-9968-4212-a34c-c01c8f8979de","Type":"ContainerStarted","Data":"b09b42c38ff49cec176bd20610570cb47b55062ed4275969a301088136795b7b"} Dec 01 07:11:23 crc kubenswrapper[4822]: I1201 07:11:23.012400 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2","Type":"ContainerStarted","Data":"6b21ee5ce8f0abec5b0d22757a6f4c5fbcb856a119ec2067aaacc6a204aa2e1f"} Dec 01 07:11:23 crc kubenswrapper[4822]: I1201 07:11:23.012451 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2","Type":"ContainerStarted","Data":"7cd21da61099911a74c5da833d9f3efde198fb8f4dd79ac58463851e358cb535"} Dec 01 07:11:23 crc kubenswrapper[4822]: I1201 07:11:23.020099 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5875588964-pg9h2" event={"ID":"b422e423-4e50-4e96-a341-d7bb5188c4af","Type":"ContainerStarted","Data":"65c8f8c2ad754b11428eef89af4d4e06df73f318c729e1210811528d1ba1f58d"} Dec 01 07:11:23 crc kubenswrapper[4822]: I1201 07:11:23.020147 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5875588964-pg9h2" event={"ID":"b422e423-4e50-4e96-a341-d7bb5188c4af","Type":"ContainerStarted","Data":"80b95a27b54dc53f014f388b97e214223361e9740c2ce60a1998d7d39c72c3d7"} Dec 01 07:11:23 crc kubenswrapper[4822]: I1201 07:11:23.020401 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:23 crc kubenswrapper[4822]: I1201 07:11:23.035404 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.035379112 podStartE2EDuration="7.035379112s" podCreationTimestamp="2025-12-01 07:11:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:23.029673301 +0000 UTC m=+1238.350480987" watchObservedRunningTime="2025-12-01 07:11:23.035379112 +0000 UTC m=+1238.356186798" Dec 01 07:11:23 crc kubenswrapper[4822]: I1201 
07:11:23.068212 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5875588964-pg9h2" podStartSLOduration=2.068188865 podStartE2EDuration="2.068188865s" podCreationTimestamp="2025-12-01 07:11:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:23.049006265 +0000 UTC m=+1238.369813951" watchObservedRunningTime="2025-12-01 07:11:23.068188865 +0000 UTC m=+1238.388996541" Dec 01 07:11:24 crc kubenswrapper[4822]: I1201 07:11:24.030632 4822 generic.go:334] "Generic (PLEG): container finished" podID="b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0" containerID="70cf9d294666fb1c698936022b2f9f0fdb8b835395a4a34acdd4f7d54a3e99b9" exitCode=0 Dec 01 07:11:24 crc kubenswrapper[4822]: I1201 07:11:24.030734 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-zndwr" event={"ID":"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0","Type":"ContainerDied","Data":"70cf9d294666fb1c698936022b2f9f0fdb8b835395a4a34acdd4f7d54a3e99b9"} Dec 01 07:11:24 crc kubenswrapper[4822]: I1201 07:11:24.034891 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55d788fdd6-vxlcs" event={"ID":"020e76da-9968-4212-a34c-c01c8f8979de","Type":"ContainerStarted","Data":"42a4de3b243b1977444983268e3455770ea130dc481d53f206e1f70bf6eac99c"} Dec 01 07:11:24 crc kubenswrapper[4822]: I1201 07:11:24.034957 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55d788fdd6-vxlcs" event={"ID":"020e76da-9968-4212-a34c-c01c8f8979de","Type":"ContainerStarted","Data":"7b63b2bf9e63e64f024e0d88a86fea704c8fb9efcca3eec122913d83b9c7b804"} Dec 01 07:11:24 crc kubenswrapper[4822]: I1201 07:11:24.075736 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-55d788fdd6-vxlcs" podStartSLOduration=3.075699149 podStartE2EDuration="3.075699149s" podCreationTimestamp="2025-12-01 07:11:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:24.073166008 +0000 UTC m=+1239.393973694" watchObservedRunningTime="2025-12-01 07:11:24.075699149 +0000 UTC m=+1239.396506855" Dec 01 07:11:25 crc kubenswrapper[4822]: I1201 07:11:25.081772 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7wprf" event={"ID":"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693","Type":"ContainerStarted","Data":"5e6a9acca1e0a2760560eb2794c3fdbee1c7e308ba21194c4a721e5e3aa20688"} Dec 01 07:11:25 crc kubenswrapper[4822]: I1201 07:11:25.082208 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:25 crc kubenswrapper[4822]: I1201 07:11:25.082799 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:25 crc kubenswrapper[4822]: I1201 07:11:25.119965 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-7wprf" podStartSLOduration=3.523375061 podStartE2EDuration="40.119944307s" podCreationTimestamp="2025-12-01 07:10:45 +0000 UTC" firstStartedPulling="2025-12-01 07:10:46.978965627 +0000 UTC m=+1202.299773303" lastFinishedPulling="2025-12-01 07:11:23.575534863 +0000 UTC m=+1238.896342549" observedRunningTime="2025-12-01 07:11:25.105638055 +0000 UTC m=+1240.426445751" watchObservedRunningTime="2025-12-01 07:11:25.119944307 +0000 UTC m=+1240.440752003" 
Dec 01 07:11:25 crc kubenswrapper[4822]: I1201 07:11:25.176457 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 07:11:25 crc kubenswrapper[4822]: I1201 07:11:25.176496 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 07:11:25 crc kubenswrapper[4822]: I1201 07:11:25.216393 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 07:11:25 crc kubenswrapper[4822]: I1201 07:11:25.243209 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 07:11:26 crc kubenswrapper[4822]: I1201 07:11:26.008452 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-55f645789c-s26qn" podUID="d28f5670-3f16-470f-8a83-a1dcaca7bce4" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.140:5353: i/o timeout" Dec 01 07:11:26 crc kubenswrapper[4822]: I1201 07:11:26.090133 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 01 07:11:26 crc kubenswrapper[4822]: I1201 07:11:26.090176 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 01 07:11:26 crc kubenswrapper[4822]: I1201 07:11:26.441027 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:26 crc kubenswrapper[4822]: I1201 07:11:26.441088 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:26 crc kubenswrapper[4822]: I1201 07:11:26.480177 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:26 crc kubenswrapper[4822]: I1201 07:11:26.532031 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:27 crc kubenswrapper[4822]: I1201 07:11:27.098816 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:27 crc kubenswrapper[4822]: I1201 07:11:27.098860 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.128107 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.130718 4822 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.131623 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.766665 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-zndwr" Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.874185 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-db-sync-config-data\") pod \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\" (UID: \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\") " Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.874251 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-combined-ca-bundle\") pod \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\" (UID: \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\") " Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.874366 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrrkq\" (UniqueName: \"kubernetes.io/projected/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-kube-api-access-mrrkq\") pod \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\" (UID: \"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0\") " Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.883172 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0" (UID: "b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.895813 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-kube-api-access-mrrkq" (OuterVolumeSpecName: "kube-api-access-mrrkq") pod "b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0" (UID: "b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0"). InnerVolumeSpecName "kube-api-access-mrrkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.903148 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0" (UID: "b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.976225 4822 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.976258 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:28 crc kubenswrapper[4822]: I1201 07:11:28.976298 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrrkq\" (UniqueName: \"kubernetes.io/projected/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0-kube-api-access-mrrkq\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:29 crc kubenswrapper[4822]: I1201 07:11:29.009384 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:29 crc kubenswrapper[4822]: I1201 07:11:29.011909 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 07:11:29 crc kubenswrapper[4822]: I1201 07:11:29.124587 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-zndwr" event={"ID":"b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0","Type":"ContainerDied","Data":"1903828d715011c1a3f18ccdfb1ae9022623c71aafe22414bb123350378e4342"} Dec 01 07:11:29 crc kubenswrapper[4822]: I1201 07:11:29.124637 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1903828d715011c1a3f18ccdfb1ae9022623c71aafe22414bb123350378e4342" Dec 01 07:11:29 crc kubenswrapper[4822]: I1201 07:11:29.124710 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-zndwr" Dec 01 07:11:29 crc kubenswrapper[4822]: I1201 07:11:29.127866 4822 generic.go:334] "Generic (PLEG): container finished" podID="ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" containerID="5e6a9acca1e0a2760560eb2794c3fdbee1c7e308ba21194c4a721e5e3aa20688" exitCode=0 Dec 01 07:11:29 crc kubenswrapper[4822]: I1201 07:11:29.128655 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7wprf" event={"ID":"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693","Type":"ContainerDied","Data":"5e6a9acca1e0a2760560eb2794c3fdbee1c7e308ba21194c4a721e5e3aa20688"} Dec 01 07:11:29 crc kubenswrapper[4822]: E1201 07:11:29.515515 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.044051 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-56d5c94499-xwfh7"] Dec 01 07:11:30 crc kubenswrapper[4822]: E1201 07:11:30.044803 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0" containerName="barbican-db-sync" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.044906 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0" containerName="barbican-db-sync" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.045182 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0" containerName="barbican-db-sync" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.046421 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.053173 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.053389 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.053628 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-vqghq" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.064522 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6"] Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.065941 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.079999 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.111160 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-56d5c94499-xwfh7"] Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.148296 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6"] Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.170439 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerName="ceilometer-notification-agent" containerID="cri-o://a41bde461beb827ce50b7240b0b6439c5b96badbdb4ac27de199c04c17f3e7ba" gracePeriod=30 Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.170874 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162","Type":"ContainerStarted","Data":"ce380fd5788e3aa8db496ccc53254f0f4c9ec52545416693ae0b33fe1436c5fa"} Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.170995 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerName="sg-core" containerID="cri-o://7b62bd2bd8314759884d809bb5811f389da2d5db64b0441cc04f1ccd4ca09830" gracePeriod=30 Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.171134 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerName="proxy-httpd" containerID="cri-o://ce380fd5788e3aa8db496ccc53254f0f4c9ec52545416693ae0b33fe1436c5fa" gracePeriod=30 Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.171158 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.178928 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-64dfd64c45-ftjhx"] Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.180289 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.186685 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64dfd64c45-ftjhx"] Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.196330 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-logs\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.196602 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-combined-ca-bundle\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.197387 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlnpl\" (UniqueName: \"kubernetes.io/projected/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-kube-api-access-tlnpl\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.197448 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5x29v\" (UniqueName: \"kubernetes.io/projected/a1df0a4e-4359-436c-9937-e4af9b500ae5-kube-api-access-5x29v\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.197487 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-config-data\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.197530 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-config-data\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.197609 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-config-data-custom\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.197710 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-config-data-custom\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: 
\"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.197760 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-combined-ca-bundle\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.197784 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1df0a4e-4359-436c-9937-e4af9b500ae5-logs\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.302410 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-config-data-custom\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303291 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-combined-ca-bundle\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303320 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1df0a4e-4359-436c-9937-e4af9b500ae5-logs\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303374 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-dns-swift-storage-0\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303439 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-logs\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303476 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-ovsdbserver-nb\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303506 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-combined-ca-bundle\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303524 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlnpl\" (UniqueName: \"kubernetes.io/projected/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-kube-api-access-tlnpl\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303560 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-config\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303583 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-ovsdbserver-sb\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303601 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5x29v\" (UniqueName: \"kubernetes.io/projected/a1df0a4e-4359-436c-9937-e4af9b500ae5-kube-api-access-5x29v\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303621 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-config-data\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303647 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-config-data\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303673 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-dns-svc\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.303717 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njcxt\" (UniqueName: \"kubernetes.io/projected/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-kube-api-access-njcxt\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 
07:11:30.303740 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-config-data-custom\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.304447 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7649fdcd84-lj592"] Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.306035 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.306097 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-logs\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.307644 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1df0a4e-4359-436c-9937-e4af9b500ae5-logs\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.334599 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.335022 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-combined-ca-bundle\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.335484 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-combined-ca-bundle\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.335922 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-config-data-custom\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.337701 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-config-data\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.338509 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-config-data-custom\") pod \"barbican-worker-56d5c94499-xwfh7\" 
(UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.342321 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5x29v\" (UniqueName: \"kubernetes.io/projected/a1df0a4e-4359-436c-9937-e4af9b500ae5-kube-api-access-5x29v\") pod \"barbican-keystone-listener-67cf4fc9cd-s6rh6\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.343048 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7649fdcd84-lj592"] Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.343265 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlnpl\" (UniqueName: \"kubernetes.io/projected/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-kube-api-access-tlnpl\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.355983 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-config-data\") pod \"barbican-worker-56d5c94499-xwfh7\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.385494 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.404880 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-config-data\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.404940 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5fd8bbb-0020-435d-8934-47840b591495-logs\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.404968 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-ovsdbserver-nb\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.405152 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-config-data-custom\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.405199 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-config\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: 
\"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.405230 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-combined-ca-bundle\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.405251 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-ovsdbserver-sb\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.405304 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-dns-svc\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.405335 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-992xp\" (UniqueName: \"kubernetes.io/projected/f5fd8bbb-0020-435d-8934-47840b591495-kube-api-access-992xp\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.405361 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njcxt\" (UniqueName: \"kubernetes.io/projected/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-kube-api-access-njcxt\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.405408 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-dns-swift-storage-0\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.406273 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-dns-swift-storage-0\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.406834 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-ovsdbserver-nb\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.407340 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-config\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: 
\"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.407970 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-ovsdbserver-sb\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.408505 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-dns-svc\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.411862 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.440588 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njcxt\" (UniqueName: \"kubernetes.io/projected/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-kube-api-access-njcxt\") pod \"dnsmasq-dns-64dfd64c45-ftjhx\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.510232 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-config-data\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.510509 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5fd8bbb-0020-435d-8934-47840b591495-logs\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.510707 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-config-data-custom\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.510785 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-combined-ca-bundle\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.510899 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-992xp\" (UniqueName: \"kubernetes.io/projected/f5fd8bbb-0020-435d-8934-47840b591495-kube-api-access-992xp\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.512494 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/f5fd8bbb-0020-435d-8934-47840b591495-logs\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.534164 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-992xp\" (UniqueName: \"kubernetes.io/projected/f5fd8bbb-0020-435d-8934-47840b591495-kube-api-access-992xp\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.538876 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-config-data-custom\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.544652 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-config-data\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.547393 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-combined-ca-bundle\") pod \"barbican-api-7649fdcd84-lj592\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.573469 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.614181 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-7wprf" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.716651 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-scripts\") pod \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.716714 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78cv6\" (UniqueName: \"kubernetes.io/projected/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-kube-api-access-78cv6\") pod \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.716825 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-db-sync-config-data\") pod \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.716873 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-etc-machine-id\") pod \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.716943 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-config-data\") pod \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.717007 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-combined-ca-bundle\") pod \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\" (UID: \"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693\") " Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.719625 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" (UID: "ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.723145 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-scripts" (OuterVolumeSpecName: "scripts") pod "ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" (UID: "ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.731656 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" (UID: "ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.757061 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.768860 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-kube-api-access-78cv6" (OuterVolumeSpecName: "kube-api-access-78cv6") pod "ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" (UID: "ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693"). InnerVolumeSpecName "kube-api-access-78cv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.786955 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" (UID: "ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.820709 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.820741 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.820751 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78cv6\" (UniqueName: \"kubernetes.io/projected/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-kube-api-access-78cv6\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.820762 4822 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.820770 4822 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.829055 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-config-data" (OuterVolumeSpecName: "config-data") pod "ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" (UID: "ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:30 crc kubenswrapper[4822]: I1201 07:11:30.922850 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.039928 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-56d5c94499-xwfh7"] Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.135436 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6"] Dec 01 07:11:31 crc kubenswrapper[4822]: W1201 07:11:31.154201 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1df0a4e_4359_436c_9937_e4af9b500ae5.slice/crio-18f3ecfd0c70308cc1b03a641ada8603d9bbe1e329d3b6fa31b513a75df8afd4 WatchSource:0}: Error finding container 18f3ecfd0c70308cc1b03a641ada8603d9bbe1e329d3b6fa31b513a75df8afd4: Status 404 returned error can't find the container with id 18f3ecfd0c70308cc1b03a641ada8603d9bbe1e329d3b6fa31b513a75df8afd4 Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.189463 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-56d5c94499-xwfh7" event={"ID":"e2b0531f-a401-46e6-80f0-7f0023e9a0d6","Type":"ContainerStarted","Data":"9ec8e591da8b03e1ee0179291bdb1a7cdb3b03f0d732e26febaced297545314f"} Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.214413 4822 generic.go:334] "Generic (PLEG): container finished" podID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerID="ce380fd5788e3aa8db496ccc53254f0f4c9ec52545416693ae0b33fe1436c5fa" exitCode=0 Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.214462 4822 generic.go:334] "Generic (PLEG): container finished" podID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerID="7b62bd2bd8314759884d809bb5811f389da2d5db64b0441cc04f1ccd4ca09830" exitCode=2 Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.214470 4822 generic.go:334] "Generic (PLEG): container finished" podID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerID="a41bde461beb827ce50b7240b0b6439c5b96badbdb4ac27de199c04c17f3e7ba" exitCode=0 Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.214523 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162","Type":"ContainerDied","Data":"ce380fd5788e3aa8db496ccc53254f0f4c9ec52545416693ae0b33fe1436c5fa"} Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.214564 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162","Type":"ContainerDied","Data":"7b62bd2bd8314759884d809bb5811f389da2d5db64b0441cc04f1ccd4ca09830"} Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.214574 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162","Type":"ContainerDied","Data":"a41bde461beb827ce50b7240b0b6439c5b96badbdb4ac27de199c04c17f3e7ba"} Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.222481 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" event={"ID":"a1df0a4e-4359-436c-9937-e4af9b500ae5","Type":"ContainerStarted","Data":"18f3ecfd0c70308cc1b03a641ada8603d9bbe1e329d3b6fa31b513a75df8afd4"} Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.242864 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7wprf" event={"ID":"ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693","Type":"ContainerDied","Data":"2147726a83d08adca032521f99ccabbea481a1946e5f7244c2c73261909e548b"}
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.242912 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2147726a83d08adca032521f99ccabbea481a1946e5f7244c2c73261909e548b"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.242982 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7wprf"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.287938 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64dfd64c45-ftjhx"]
Dec 01 07:11:31 crc kubenswrapper[4822]: W1201 07:11:31.308742 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddbdf4d8c_1389_45e3_9858_71fecd8b1b9c.slice/crio-1c7b3cbfd366f5d2e74ef105dc9ab6bdf72d313870051829762939ec6e71db3d WatchSource:0}: Error finding container 1c7b3cbfd366f5d2e74ef105dc9ab6bdf72d313870051829762939ec6e71db3d: Status 404 returned error can't find the container with id 1c7b3cbfd366f5d2e74ef105dc9ab6bdf72d313870051829762939ec6e71db3d
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.407469 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7649fdcd84-lj592"]
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.542662 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 01 07:11:31 crc kubenswrapper[4822]: E1201 07:11:31.543173 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" containerName="cinder-db-sync"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.543188 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" containerName="cinder-db-sync"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.543365 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" containerName="cinder-db-sync"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.545164 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.551864 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.552208 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.552204 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.552359 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.552447 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6h2sv\" (UniqueName: \"kubernetes.io/projected/14297bb6-88ad-438f-afb1-af681311e3b5-kube-api-access-6h2sv\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.552526 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/14297bb6-88ad-438f-afb1-af681311e3b5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.552616 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-scripts\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.552707 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-config-data\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.552885 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.566460 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-lhmwk"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.590682 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.596652 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.615057 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64dfd64c45-ftjhx"]
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.641538 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8ccb5c7cf-h7s98"]
Dec 01 07:11:31 crc kubenswrapper[4822]: E1201 07:11:31.641978 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerName="ceilometer-notification-agent"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.641994 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerName="ceilometer-notification-agent"
Dec 01 07:11:31 crc kubenswrapper[4822]: E1201 07:11:31.642019 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerName="proxy-httpd"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.642028 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerName="proxy-httpd"
Dec 01 07:11:31 crc kubenswrapper[4822]: E1201 07:11:31.642043 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerName="sg-core"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.642049 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerName="sg-core"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.642270 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerName="proxy-httpd"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.642299 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerName="sg-core"
Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.642309 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" containerName="ceilometer-notification-agent"
Need to start a new one" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.656390 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-config-data\") pod \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.656974 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldk4q\" (UniqueName: \"kubernetes.io/projected/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-kube-api-access-ldk4q\") pod \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.657010 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-combined-ca-bundle\") pod \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.657064 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-scripts\") pod \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.657135 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-sg-core-conf-yaml\") pod \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.657154 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-log-httpd\") pod \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.657268 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-run-httpd\") pod \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\" (UID: \"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162\") " Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.657545 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-config-data\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.657797 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-config\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.657831 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-ovsdbserver-sb\") pod 
\"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.657888 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-dns-svc\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.657916 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.657968 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.658002 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-dns-swift-storage-0\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.658025 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6h2sv\" (UniqueName: \"kubernetes.io/projected/14297bb6-88ad-438f-afb1-af681311e3b5-kube-api-access-6h2sv\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.658058 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-ovsdbserver-nb\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.658103 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/14297bb6-88ad-438f-afb1-af681311e3b5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.658142 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-scripts\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.658176 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlnn6\" (UniqueName: \"kubernetes.io/projected/149ce159-6c78-4062-8145-5aa08ff2f3fb-kube-api-access-jlnn6\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: 
\"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.659697 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" (UID: "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.670772 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.671028 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" (UID: "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.672573 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/14297bb6-88ad-438f-afb1-af681311e3b5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.673367 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-scripts" (OuterVolumeSpecName: "scripts") pod "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" (UID: "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.684418 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-scripts\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.693154 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-config-data\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.696503 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.701370 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-kube-api-access-ldk4q" (OuterVolumeSpecName: "kube-api-access-ldk4q") pod "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" (UID: "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162"). InnerVolumeSpecName "kube-api-access-ldk4q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.704999 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6h2sv\" (UniqueName: \"kubernetes.io/projected/14297bb6-88ad-438f-afb1-af681311e3b5-kube-api-access-6h2sv\") pod \"cinder-scheduler-0\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.710528 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8ccb5c7cf-h7s98"] Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.718347 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.720602 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.721931 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.732648 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.760422 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-dns-svc\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.760485 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-config-data\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.760511 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.760567 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-dns-swift-storage-0\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.760591 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0e21c31-73a8-4980-9566-fe836549e8b5-logs\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.760619 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-ovsdbserver-nb\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.760639 4822 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0e21c31-73a8-4980-9566-fe836549e8b5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.761394 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-dns-svc\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.761421 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-config-data-custom\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.761469 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-scripts\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.761489 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnv5g\" (UniqueName: \"kubernetes.io/projected/e0e21c31-73a8-4980-9566-fe836549e8b5-kube-api-access-vnv5g\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.761532 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlnn6\" (UniqueName: \"kubernetes.io/projected/149ce159-6c78-4062-8145-5aa08ff2f3fb-kube-api-access-jlnn6\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.761637 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-config\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.761668 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-ovsdbserver-sb\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.761720 4822 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.761732 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldk4q\" (UniqueName: \"kubernetes.io/projected/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-kube-api-access-ldk4q\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 
07:11:31.761746 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.761756 4822 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.761937 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-dns-swift-storage-0\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.762481 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-ovsdbserver-nb\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.762962 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-ovsdbserver-sb\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.763268 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-config\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.798332 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlnn6\" (UniqueName: \"kubernetes.io/projected/149ce159-6c78-4062-8145-5aa08ff2f3fb-kube-api-access-jlnn6\") pod \"dnsmasq-dns-8ccb5c7cf-h7s98\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.816027 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" (UID: "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.833980 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" (UID: "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.863653 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-config-data\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.863695 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.863738 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0e21c31-73a8-4980-9566-fe836549e8b5-logs\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.863762 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0e21c31-73a8-4980-9566-fe836549e8b5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.863778 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-config-data-custom\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.863798 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-scripts\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.863814 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnv5g\" (UniqueName: \"kubernetes.io/projected/e0e21c31-73a8-4980-9566-fe836549e8b5-kube-api-access-vnv5g\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.863901 4822 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.863913 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.864306 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0e21c31-73a8-4980-9566-fe836549e8b5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.864533 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0e21c31-73a8-4980-9566-fe836549e8b5-logs\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.867339 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-config-data\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.867812 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.867892 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-config-data-custom\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.868197 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-scripts\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.879462 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnv5g\" (UniqueName: \"kubernetes.io/projected/e0e21c31-73a8-4980-9566-fe836549e8b5-kube-api-access-vnv5g\") pod \"cinder-api-0\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " pod="openstack/cinder-api-0" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.882927 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-config-data" (OuterVolumeSpecName: "config-data") pod "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" (UID: "c8f5b07d-19c7-4b1a-90a9-6b19bb76e162"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.966818 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:31 crc kubenswrapper[4822]: I1201 07:11:31.978830 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.015489 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.022688 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.259629 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8f5b07d-19c7-4b1a-90a9-6b19bb76e162","Type":"ContainerDied","Data":"f5dfc0eafa62543ad16188b1fe4ccab079d6cbc6a2f0a270e0a8b607b89744e6"} Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.259654 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.260052 4822 scope.go:117] "RemoveContainer" containerID="ce380fd5788e3aa8db496ccc53254f0f4c9ec52545416693ae0b33fe1436c5fa" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.275995 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7649fdcd84-lj592" event={"ID":"f5fd8bbb-0020-435d-8934-47840b591495","Type":"ContainerStarted","Data":"6056a5b46da462ad08ba967cfabd58573a3cf9478d8c00cc9963f0ee5d8aad8e"} Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.276039 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7649fdcd84-lj592" event={"ID":"f5fd8bbb-0020-435d-8934-47840b591495","Type":"ContainerStarted","Data":"fa1a42c217e8af5cb3cf84551074fc2dad34352132042251e52f7f02cce1197f"} Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.276049 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7649fdcd84-lj592" event={"ID":"f5fd8bbb-0020-435d-8934-47840b591495","Type":"ContainerStarted","Data":"fc04a4885f1ccb942a86ecd673f0ba73ec8d958a53c4a6877960a1989d472940"} Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.276745 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.276783 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.290731 4822 generic.go:334] "Generic (PLEG): container finished" podID="dbdf4d8c-1389-45e3-9858-71fecd8b1b9c" containerID="10a03947ab1851748e46153da6dd6d940c97d2c251ee55a7d06a06f5a319e5db" exitCode=0 Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.290774 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" event={"ID":"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c","Type":"ContainerDied","Data":"10a03947ab1851748e46153da6dd6d940c97d2c251ee55a7d06a06f5a319e5db"} Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.290800 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" event={"ID":"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c","Type":"ContainerStarted","Data":"1c7b3cbfd366f5d2e74ef105dc9ab6bdf72d313870051829762939ec6e71db3d"} Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.298922 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7649fdcd84-lj592" podStartSLOduration=2.2989023619999998 podStartE2EDuration="2.298902362s" podCreationTimestamp="2025-12-01 07:11:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:32.297335188 +0000 UTC m=+1247.618142874" watchObservedRunningTime="2025-12-01 07:11:32.298902362 +0000 UTC m=+1247.619710048" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.393631 4822 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.409161 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.418832 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.421316 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.425452 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.426557 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.444167 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.497487 4822 scope.go:117] "RemoveContainer" containerID="7b62bd2bd8314759884d809bb5811f389da2d5db64b0441cc04f1ccd4ca09830" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.579040 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-scripts\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.579779 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-log-httpd\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.579895 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-config-data\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.579955 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.579982 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-run-httpd\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.580029 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.580055 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-l7zgt\" (UniqueName: \"kubernetes.io/projected/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-kube-api-access-l7zgt\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.682788 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-log-httpd\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.682840 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-config-data\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.682882 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.682901 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-run-httpd\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.682934 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.682954 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7zgt\" (UniqueName: \"kubernetes.io/projected/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-kube-api-access-l7zgt\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.682978 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-scripts\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.683838 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-run-httpd\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.684747 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-log-httpd\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.689152 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.689346 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-config-data\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.691859 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.703519 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-scripts\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.703792 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7zgt\" (UniqueName: \"kubernetes.io/projected/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-kube-api-access-l7zgt\") pod \"ceilometer-0\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.742278 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:11:32 crc kubenswrapper[4822]: I1201 07:11:32.969880 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8f5b07d-19c7-4b1a-90a9-6b19bb76e162" path="/var/lib/kubelet/pods/c8f5b07d-19c7-4b1a-90a9-6b19bb76e162/volumes" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.056751 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.077503 4822 scope.go:117] "RemoveContainer" containerID="a41bde461beb827ce50b7240b0b6439c5b96badbdb4ac27de199c04c17f3e7ba" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.195290 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-dns-swift-storage-0\") pod \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.195334 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-config\") pod \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.195623 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-ovsdbserver-sb\") pod \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.195640 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-dns-svc\") pod \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.195755 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njcxt\" (UniqueName: \"kubernetes.io/projected/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-kube-api-access-njcxt\") pod \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.195808 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-ovsdbserver-nb\") pod \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\" (UID: \"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c\") " Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.214241 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-kube-api-access-njcxt" (OuterVolumeSpecName: "kube-api-access-njcxt") pod "dbdf4d8c-1389-45e3-9858-71fecd8b1b9c" (UID: "dbdf4d8c-1389-45e3-9858-71fecd8b1b9c"). InnerVolumeSpecName "kube-api-access-njcxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.222097 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dbdf4d8c-1389-45e3-9858-71fecd8b1b9c" (UID: "dbdf4d8c-1389-45e3-9858-71fecd8b1b9c"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.223099 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dbdf4d8c-1389-45e3-9858-71fecd8b1b9c" (UID: "dbdf4d8c-1389-45e3-9858-71fecd8b1b9c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.226357 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-config" (OuterVolumeSpecName: "config") pod "dbdf4d8c-1389-45e3-9858-71fecd8b1b9c" (UID: "dbdf4d8c-1389-45e3-9858-71fecd8b1b9c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.230344 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dbdf4d8c-1389-45e3-9858-71fecd8b1b9c" (UID: "dbdf4d8c-1389-45e3-9858-71fecd8b1b9c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.238486 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "dbdf4d8c-1389-45e3-9858-71fecd8b1b9c" (UID: "dbdf4d8c-1389-45e3-9858-71fecd8b1b9c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.298196 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njcxt\" (UniqueName: \"kubernetes.io/projected/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-kube-api-access-njcxt\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.298220 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.298229 4822 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.298240 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.298252 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.298288 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.316934 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" event={"ID":"dbdf4d8c-1389-45e3-9858-71fecd8b1b9c","Type":"ContainerDied","Data":"1c7b3cbfd366f5d2e74ef105dc9ab6bdf72d313870051829762939ec6e71db3d"} Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.318597 4822 scope.go:117] "RemoveContainer" containerID="10a03947ab1851748e46153da6dd6d940c97d2c251ee55a7d06a06f5a319e5db" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.317521 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64dfd64c45-ftjhx" Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.400644 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64dfd64c45-ftjhx"] Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.412474 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-64dfd64c45-ftjhx"] Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.426093 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8ccb5c7cf-h7s98"] Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.442681 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.592763 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 07:11:33 crc kubenswrapper[4822]: W1201 07:11:33.605686 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0e21c31_73a8_4980_9566_fe836549e8b5.slice/crio-db918edb5286ed98320e0dab9ab75a9d313e416d366a31cf8eef8bb15ea1c82a WatchSource:0}: Error finding container db918edb5286ed98320e0dab9ab75a9d313e416d366a31cf8eef8bb15ea1c82a: Status 404 returned error can't find the container with id db918edb5286ed98320e0dab9ab75a9d313e416d366a31cf8eef8bb15ea1c82a Dec 01 07:11:33 crc kubenswrapper[4822]: W1201 07:11:33.607690 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2db9ac4_2fdc_414c_ac4a_9b9c187ed862.slice/crio-084bfc44e6b7192a948ad47ac5b427bc5fab3a8b0ce1e8189dc3faa90c024320 WatchSource:0}: Error finding container 084bfc44e6b7192a948ad47ac5b427bc5fab3a8b0ce1e8189dc3faa90c024320: Status 404 returned error can't find the container with id 084bfc44e6b7192a948ad47ac5b427bc5fab3a8b0ce1e8189dc3faa90c024320 Dec 01 07:11:33 crc kubenswrapper[4822]: I1201 07:11:33.610307 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.335656 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e0e21c31-73a8-4980-9566-fe836549e8b5","Type":"ContainerStarted","Data":"db918edb5286ed98320e0dab9ab75a9d313e416d366a31cf8eef8bb15ea1c82a"} Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.338118 4822 generic.go:334] "Generic (PLEG): container finished" podID="149ce159-6c78-4062-8145-5aa08ff2f3fb" containerID="d0c017e15441d391d197b6e125bdf48aa979b07e3ae258a578511b6ca7160f47" exitCode=0 Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.338191 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" event={"ID":"149ce159-6c78-4062-8145-5aa08ff2f3fb","Type":"ContainerDied","Data":"d0c017e15441d391d197b6e125bdf48aa979b07e3ae258a578511b6ca7160f47"} Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.338225 4822 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" event={"ID":"149ce159-6c78-4062-8145-5aa08ff2f3fb","Type":"ContainerStarted","Data":"0276a0de51fd2504956dbf7221f76fe6d60bd7f5410aa26f78fdb81073235634"} Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.346616 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"14297bb6-88ad-438f-afb1-af681311e3b5","Type":"ContainerStarted","Data":"dc39dee31744b3f291bfcfe5f2f41455d3405b7d2709cea7f5aa60a1a8c561ec"} Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.376082 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862","Type":"ContainerStarted","Data":"084bfc44e6b7192a948ad47ac5b427bc5fab3a8b0ce1e8189dc3faa90c024320"} Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.379921 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-56d5c94499-xwfh7" event={"ID":"e2b0531f-a401-46e6-80f0-7f0023e9a0d6","Type":"ContainerStarted","Data":"ec6d0903bf7c036cf0d4792cee042afc8037b300b849a72d91c313222cd086a0"} Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.379956 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-56d5c94499-xwfh7" event={"ID":"e2b0531f-a401-46e6-80f0-7f0023e9a0d6","Type":"ContainerStarted","Data":"eac2f0524072a06a976ebc4a00a7e18a6672bbea7169f996d10dfd7fb8ff0ee3"} Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.399992 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-56d5c94499-xwfh7" podStartSLOduration=2.242284818 podStartE2EDuration="4.39996934s" podCreationTimestamp="2025-12-01 07:11:30 +0000 UTC" firstStartedPulling="2025-12-01 07:11:31.049985218 +0000 UTC m=+1246.370792904" lastFinishedPulling="2025-12-01 07:11:33.20766974 +0000 UTC m=+1248.528477426" observedRunningTime="2025-12-01 07:11:34.398055366 +0000 UTC m=+1249.718863052" watchObservedRunningTime="2025-12-01 07:11:34.39996934 +0000 UTC m=+1249.720777026" Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.420926 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" event={"ID":"a1df0a4e-4359-436c-9937-e4af9b500ae5","Type":"ContainerStarted","Data":"d9ad4dec5b7ced2f8a0abc30fb3b367a52102c69895a9418a23c7124dd5666eb"} Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.420960 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" event={"ID":"a1df0a4e-4359-436c-9937-e4af9b500ae5","Type":"ContainerStarted","Data":"0904f4c58b2a13e4a8f5dea770e37c7608b4c6d0e98aeca868ba95e684843122"} Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.462480 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" podStartSLOduration=2.544822307 podStartE2EDuration="4.462454687s" podCreationTimestamp="2025-12-01 07:11:30 +0000 UTC" firstStartedPulling="2025-12-01 07:11:31.160239279 +0000 UTC m=+1246.481046965" lastFinishedPulling="2025-12-01 07:11:33.077871659 +0000 UTC m=+1248.398679345" observedRunningTime="2025-12-01 07:11:34.451240772 +0000 UTC m=+1249.772048458" watchObservedRunningTime="2025-12-01 07:11:34.462454687 +0000 UTC m=+1249.783262373" Dec 01 07:11:34 crc kubenswrapper[4822]: I1201 07:11:34.964126 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="dbdf4d8c-1389-45e3-9858-71fecd8b1b9c" path="/var/lib/kubelet/pods/dbdf4d8c-1389-45e3-9858-71fecd8b1b9c/volumes" Dec 01 07:11:35 crc kubenswrapper[4822]: I1201 07:11:35.432161 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e0e21c31-73a8-4980-9566-fe836549e8b5","Type":"ContainerStarted","Data":"53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd"} Dec 01 07:11:35 crc kubenswrapper[4822]: I1201 07:11:35.432209 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e0e21c31-73a8-4980-9566-fe836549e8b5","Type":"ContainerStarted","Data":"cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66"} Dec 01 07:11:35 crc kubenswrapper[4822]: I1201 07:11:35.432662 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 01 07:11:35 crc kubenswrapper[4822]: I1201 07:11:35.442215 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" event={"ID":"149ce159-6c78-4062-8145-5aa08ff2f3fb","Type":"ContainerStarted","Data":"3a1ad89ce942377a7fd0f9f31e663bc1c039f8c7a99ef0ea4a07765e450096f5"} Dec 01 07:11:35 crc kubenswrapper[4822]: I1201 07:11:35.442292 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:35 crc kubenswrapper[4822]: I1201 07:11:35.458097 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.458078328 podStartE2EDuration="4.458078328s" podCreationTimestamp="2025-12-01 07:11:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:35.456041011 +0000 UTC m=+1250.776848697" watchObservedRunningTime="2025-12-01 07:11:35.458078328 +0000 UTC m=+1250.778886014" Dec 01 07:11:35 crc kubenswrapper[4822]: I1201 07:11:35.472449 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"14297bb6-88ad-438f-afb1-af681311e3b5","Type":"ContainerStarted","Data":"ffa9069e31e7e4f177836e190599b849cde3830e468dafa8ed7d62114db3aae7"} Dec 01 07:11:35 crc kubenswrapper[4822]: I1201 07:11:35.485417 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862","Type":"ContainerStarted","Data":"59c79f57c551b38dcff370abaa16d1ece43bee9e5687047a0bb2960aa500c9bd"} Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.086099 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" podStartSLOduration=5.08608528 podStartE2EDuration="5.08608528s" podCreationTimestamp="2025-12-01 07:11:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:35.483788411 +0000 UTC m=+1250.804596097" watchObservedRunningTime="2025-12-01 07:11:36.08608528 +0000 UTC m=+1251.406892966" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.106673 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.508216 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"14297bb6-88ad-438f-afb1-af681311e3b5","Type":"ContainerStarted","Data":"d8ae9f3f72c10090b3836cca4c19f9fbb08bae4e11edeb4b6da22d259ef203dc"} Dec 01 
07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.511088 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862","Type":"ContainerStarted","Data":"4fc9d1e1036db88f60a278580b3c351e88f7e3893ae0bdbdd4d80ffdae35a520"} Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.511134 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862","Type":"ContainerStarted","Data":"c34156c2014761490e0d1d9d3e71e9381093b77f6e26e24ad7138af2f868ad62"} Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.541348 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.588542148 podStartE2EDuration="5.541329683s" podCreationTimestamp="2025-12-01 07:11:31 +0000 UTC" firstStartedPulling="2025-12-01 07:11:33.454191602 +0000 UTC m=+1248.774999288" lastFinishedPulling="2025-12-01 07:11:34.406979137 +0000 UTC m=+1249.727786823" observedRunningTime="2025-12-01 07:11:36.526390372 +0000 UTC m=+1251.847198058" watchObservedRunningTime="2025-12-01 07:11:36.541329683 +0000 UTC m=+1251.862137369" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.813161 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6bdbb4cd8b-5ttmd"] Dec 01 07:11:36 crc kubenswrapper[4822]: E1201 07:11:36.813798 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbdf4d8c-1389-45e3-9858-71fecd8b1b9c" containerName="init" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.813816 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbdf4d8c-1389-45e3-9858-71fecd8b1b9c" containerName="init" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.814179 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbdf4d8c-1389-45e3-9858-71fecd8b1b9c" containerName="init" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.815296 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.818095 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.818407 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.869038 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6bdbb4cd8b-5ttmd"] Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.885715 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-config-data\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.885815 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f5cf9f6-a48b-455b-aef3-952697eb1a09-logs\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.885891 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-internal-tls-certs\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.885922 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gv5gn\" (UniqueName: \"kubernetes.io/projected/4f5cf9f6-a48b-455b-aef3-952697eb1a09-kube-api-access-gv5gn\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.885972 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-combined-ca-bundle\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.886142 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-config-data-custom\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.886315 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-public-tls-certs\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.987137 4822 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.988176 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f5cf9f6-a48b-455b-aef3-952697eb1a09-logs\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.988287 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-internal-tls-certs\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.988316 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gv5gn\" (UniqueName: \"kubernetes.io/projected/4f5cf9f6-a48b-455b-aef3-952697eb1a09-kube-api-access-gv5gn\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.988349 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-combined-ca-bundle\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.988396 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-config-data-custom\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.988435 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-public-tls-certs\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.988468 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-config-data\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:36 crc kubenswrapper[4822]: I1201 07:11:36.989510 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f5cf9f6-a48b-455b-aef3-952697eb1a09-logs\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:37 crc kubenswrapper[4822]: I1201 07:11:37.000031 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-config-data-custom\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " 
pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:37 crc kubenswrapper[4822]: I1201 07:11:37.001340 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-internal-tls-certs\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:37 crc kubenswrapper[4822]: I1201 07:11:37.004088 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-config-data\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:37 crc kubenswrapper[4822]: I1201 07:11:37.004342 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-combined-ca-bundle\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:37 crc kubenswrapper[4822]: I1201 07:11:37.004726 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-public-tls-certs\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:37 crc kubenswrapper[4822]: I1201 07:11:37.044207 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gv5gn\" (UniqueName: \"kubernetes.io/projected/4f5cf9f6-a48b-455b-aef3-952697eb1a09-kube-api-access-gv5gn\") pod \"barbican-api-6bdbb4cd8b-5ttmd\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:37 crc kubenswrapper[4822]: I1201 07:11:37.130320 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:37 crc kubenswrapper[4822]: I1201 07:11:37.530860 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="e0e21c31-73a8-4980-9566-fe836549e8b5" containerName="cinder-api-log" containerID="cri-o://cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66" gracePeriod=30 Dec 01 07:11:37 crc kubenswrapper[4822]: I1201 07:11:37.533182 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="e0e21c31-73a8-4980-9566-fe836549e8b5" containerName="cinder-api" containerID="cri-o://53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd" gracePeriod=30 Dec 01 07:11:37 crc kubenswrapper[4822]: I1201 07:11:37.631506 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6bdbb4cd8b-5ttmd"] Dec 01 07:11:37 crc kubenswrapper[4822]: W1201 07:11:37.649023 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f5cf9f6_a48b_455b_aef3_952697eb1a09.slice/crio-4a89ea1e3c447a88bb5d40e8e613938ea48ad7d41aaeb9ec0c67b3af6a90a9f9 WatchSource:0}: Error finding container 4a89ea1e3c447a88bb5d40e8e613938ea48ad7d41aaeb9ec0c67b3af6a90a9f9: Status 404 returned error can't find the container with id 4a89ea1e3c447a88bb5d40e8e613938ea48ad7d41aaeb9ec0c67b3af6a90a9f9 Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.072480 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.109110 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-combined-ca-bundle\") pod \"e0e21c31-73a8-4980-9566-fe836549e8b5\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.109199 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-config-data-custom\") pod \"e0e21c31-73a8-4980-9566-fe836549e8b5\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.109302 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-config-data\") pod \"e0e21c31-73a8-4980-9566-fe836549e8b5\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.109324 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-scripts\") pod \"e0e21c31-73a8-4980-9566-fe836549e8b5\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.109459 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnv5g\" (UniqueName: \"kubernetes.io/projected/e0e21c31-73a8-4980-9566-fe836549e8b5-kube-api-access-vnv5g\") pod \"e0e21c31-73a8-4980-9566-fe836549e8b5\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.109485 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0e21c31-73a8-4980-9566-fe836549e8b5-etc-machine-id\") pod \"e0e21c31-73a8-4980-9566-fe836549e8b5\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.109510 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0e21c31-73a8-4980-9566-fe836549e8b5-logs\") pod \"e0e21c31-73a8-4980-9566-fe836549e8b5\" (UID: \"e0e21c31-73a8-4980-9566-fe836549e8b5\") " Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.110346 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0e21c31-73a8-4980-9566-fe836549e8b5-logs" (OuterVolumeSpecName: "logs") pod "e0e21c31-73a8-4980-9566-fe836549e8b5" (UID: "e0e21c31-73a8-4980-9566-fe836549e8b5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.111185 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0e21c31-73a8-4980-9566-fe836549e8b5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e0e21c31-73a8-4980-9566-fe836549e8b5" (UID: "e0e21c31-73a8-4980-9566-fe836549e8b5"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.119053 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e0e21c31-73a8-4980-9566-fe836549e8b5" (UID: "e0e21c31-73a8-4980-9566-fe836549e8b5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.119134 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-scripts" (OuterVolumeSpecName: "scripts") pod "e0e21c31-73a8-4980-9566-fe836549e8b5" (UID: "e0e21c31-73a8-4980-9566-fe836549e8b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.123099 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0e21c31-73a8-4980-9566-fe836549e8b5-kube-api-access-vnv5g" (OuterVolumeSpecName: "kube-api-access-vnv5g") pod "e0e21c31-73a8-4980-9566-fe836549e8b5" (UID: "e0e21c31-73a8-4980-9566-fe836549e8b5"). InnerVolumeSpecName "kube-api-access-vnv5g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.145450 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0e21c31-73a8-4980-9566-fe836549e8b5" (UID: "e0e21c31-73a8-4980-9566-fe836549e8b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.167915 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-config-data" (OuterVolumeSpecName: "config-data") pod "e0e21c31-73a8-4980-9566-fe836549e8b5" (UID: "e0e21c31-73a8-4980-9566-fe836549e8b5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.212204 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.212235 4822 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.212246 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.212254 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e21c31-73a8-4980-9566-fe836549e8b5-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.212262 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnv5g\" (UniqueName: \"kubernetes.io/projected/e0e21c31-73a8-4980-9566-fe836549e8b5-kube-api-access-vnv5g\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.212273 4822 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0e21c31-73a8-4980-9566-fe836549e8b5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.212281 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0e21c31-73a8-4980-9566-fe836549e8b5-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.539133 4822 generic.go:334] "Generic (PLEG): container finished" podID="e0e21c31-73a8-4980-9566-fe836549e8b5" containerID="53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd" exitCode=0 Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.539172 4822 generic.go:334] "Generic (PLEG): container finished" podID="e0e21c31-73a8-4980-9566-fe836549e8b5" containerID="cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66" exitCode=143 Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.539177 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e0e21c31-73a8-4980-9566-fe836549e8b5","Type":"ContainerDied","Data":"53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd"} Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.539196 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.539217 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e0e21c31-73a8-4980-9566-fe836549e8b5","Type":"ContainerDied","Data":"cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66"} Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.539229 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e0e21c31-73a8-4980-9566-fe836549e8b5","Type":"ContainerDied","Data":"db918edb5286ed98320e0dab9ab75a9d313e416d366a31cf8eef8bb15ea1c82a"} Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.539230 4822 scope.go:117] "RemoveContainer" containerID="53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.542870 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862","Type":"ContainerStarted","Data":"abe19f43b5349af7f94c8bf8c6cfb7cce95116ab44832bc5a412b0133ddf61b1"} Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.542928 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.552025 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" event={"ID":"4f5cf9f6-a48b-455b-aef3-952697eb1a09","Type":"ContainerStarted","Data":"3e945f46d94da2037046bdb5e78c0bab8aeb085af05bb39debf197f614a96054"} Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.552367 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" event={"ID":"4f5cf9f6-a48b-455b-aef3-952697eb1a09","Type":"ContainerStarted","Data":"b2656b9a99cb8b9b157e9e99731e4fa691f7956b8db22c14b097df4e6e5d524e"} Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.552386 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.552398 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" event={"ID":"4f5cf9f6-a48b-455b-aef3-952697eb1a09","Type":"ContainerStarted","Data":"4a89ea1e3c447a88bb5d40e8e613938ea48ad7d41aaeb9ec0c67b3af6a90a9f9"} Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.552412 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.572381 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.735239859 podStartE2EDuration="6.572362481s" podCreationTimestamp="2025-12-01 07:11:32 +0000 UTC" firstStartedPulling="2025-12-01 07:11:33.613287186 +0000 UTC m=+1248.934094872" lastFinishedPulling="2025-12-01 07:11:37.450409808 +0000 UTC m=+1252.771217494" observedRunningTime="2025-12-01 07:11:38.563929314 +0000 UTC m=+1253.884737010" watchObservedRunningTime="2025-12-01 07:11:38.572362481 +0000 UTC m=+1253.893170167" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.577658 4822 scope.go:117] "RemoveContainer" containerID="cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.587109 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" podStartSLOduration=2.587091975 podStartE2EDuration="2.587091975s" podCreationTimestamp="2025-12-01 07:11:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:38.582398033 +0000 UTC m=+1253.903205719" watchObservedRunningTime="2025-12-01 07:11:38.587091975 +0000 UTC m=+1253.907899661" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.609617 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.630829 4822 scope.go:117] "RemoveContainer" containerID="53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd" Dec 01 07:11:38 crc kubenswrapper[4822]: E1201 07:11:38.633885 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd\": container with ID starting with 53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd not found: ID does not exist" containerID="53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.633953 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd"} err="failed to get container status \"53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd\": rpc error: code = NotFound desc = could not find container \"53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd\": container with ID starting with 53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd not found: ID does not exist" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.633982 4822 scope.go:117] "RemoveContainer" containerID="cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66" Dec 01 07:11:38 crc kubenswrapper[4822]: E1201 07:11:38.634649 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66\": container with ID starting with cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66 not found: ID does not exist" containerID="cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.634692 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66"} err="failed to get container status \"cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66\": rpc error: code = NotFound desc = could not find container \"cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66\": container with ID starting with cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66 not found: ID does not exist" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.634817 4822 scope.go:117] "RemoveContainer" containerID="53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.635401 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd"} err="failed to get container status 
\"53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd\": rpc error: code = NotFound desc = could not find container \"53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd\": container with ID starting with 53d4be0242aabca7052f1580ae5734df1cb7ddbf2637ce6f29f046a956ecddbd not found: ID does not exist" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.635426 4822 scope.go:117] "RemoveContainer" containerID="cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.635962 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66"} err="failed to get container status \"cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66\": rpc error: code = NotFound desc = could not find container \"cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66\": container with ID starting with cd5dafc774c371e12089a59468327dffad7544d0d7b6b8c62109d6ded7c37f66 not found: ID does not exist" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.648971 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.681387 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 01 07:11:38 crc kubenswrapper[4822]: E1201 07:11:38.681863 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0e21c31-73a8-4980-9566-fe836549e8b5" containerName="cinder-api-log" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.681886 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0e21c31-73a8-4980-9566-fe836549e8b5" containerName="cinder-api-log" Dec 01 07:11:38 crc kubenswrapper[4822]: E1201 07:11:38.681913 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0e21c31-73a8-4980-9566-fe836549e8b5" containerName="cinder-api" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.681922 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0e21c31-73a8-4980-9566-fe836549e8b5" containerName="cinder-api" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.682149 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0e21c31-73a8-4980-9566-fe836549e8b5" containerName="cinder-api" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.682188 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0e21c31-73a8-4980-9566-fe836549e8b5" containerName="cinder-api-log" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.683380 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.685972 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.686152 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.687085 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.694345 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.732394 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.732497 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.732710 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-config-data-custom\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.732741 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.732825 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-logs\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.732881 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhhlr\" (UniqueName: \"kubernetes.io/projected/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-kube-api-access-rhhlr\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.732948 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-config-data\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.732972 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.733012 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-scripts\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.834995 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-config-data\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.835298 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.835381 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-scripts\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.835512 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.835626 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.835715 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-config-data-custom\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.835790 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.835864 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-logs\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.835935 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhhlr\" (UniqueName: 
\"kubernetes.io/projected/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-kube-api-access-rhhlr\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.836327 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-logs\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.837058 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.842378 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-scripts\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.842621 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.842686 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-config-data\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.842754 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.843780 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-config-data-custom\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.844473 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.853809 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhhlr\" (UniqueName: \"kubernetes.io/projected/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-kube-api-access-rhhlr\") pod \"cinder-api-0\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " pod="openstack/cinder-api-0" Dec 01 07:11:38 crc kubenswrapper[4822]: I1201 07:11:38.969224 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0e21c31-73a8-4980-9566-fe836549e8b5" 
path="/var/lib/kubelet/pods/e0e21c31-73a8-4980-9566-fe836549e8b5/volumes" Dec 01 07:11:39 crc kubenswrapper[4822]: I1201 07:11:39.004120 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 07:11:39 crc kubenswrapper[4822]: W1201 07:11:39.559447 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22f94321_d0ce_48f5_82fa_a0b60b5b1dd3.slice/crio-89d9570caf317d01127d51a05da6049d9afacaf1d133effd375fd9042ecaf7b8 WatchSource:0}: Error finding container 89d9570caf317d01127d51a05da6049d9afacaf1d133effd375fd9042ecaf7b8: Status 404 returned error can't find the container with id 89d9570caf317d01127d51a05da6049d9afacaf1d133effd375fd9042ecaf7b8 Dec 01 07:11:39 crc kubenswrapper[4822]: I1201 07:11:39.571764 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 07:11:39 crc kubenswrapper[4822]: I1201 07:11:39.581932 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3","Type":"ContainerStarted","Data":"89d9570caf317d01127d51a05da6049d9afacaf1d133effd375fd9042ecaf7b8"} Dec 01 07:11:40 crc kubenswrapper[4822]: I1201 07:11:40.521873 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:40 crc kubenswrapper[4822]: I1201 07:11:40.604017 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3","Type":"ContainerStarted","Data":"c74bb3693663962c268382b1974734e4c228ed0e61b1129daf7f28351b85cae6"} Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.016676 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.282041 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.282971 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.284075 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.302765 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f677dd449-mnlmw"] Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.303070 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" podUID="a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" containerName="dnsmasq-dns" containerID="cri-o://6a6f8600e82ab47468d8adcd7d669f007b4eab85a3625dca88b611e3c0d5e275" gracePeriod=10 Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.353952 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.665174 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3","Type":"ContainerStarted","Data":"da2877c86b5adb431c13ff28bb62c50e650b56f0e6e01accc5c938c8b253a36c"} Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.665542 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/cinder-api-0" Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.668822 4822 generic.go:334] "Generic (PLEG): container finished" podID="a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" containerID="6a6f8600e82ab47468d8adcd7d669f007b4eab85a3625dca88b611e3c0d5e275" exitCode=0 Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.668968 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" event={"ID":"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a","Type":"ContainerDied","Data":"6a6f8600e82ab47468d8adcd7d669f007b4eab85a3625dca88b611e3c0d5e275"} Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.669163 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="14297bb6-88ad-438f-afb1-af681311e3b5" containerName="cinder-scheduler" containerID="cri-o://ffa9069e31e7e4f177836e190599b849cde3830e468dafa8ed7d62114db3aae7" gracePeriod=30 Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.669251 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="14297bb6-88ad-438f-afb1-af681311e3b5" containerName="probe" containerID="cri-o://d8ae9f3f72c10090b3836cca4c19f9fbb08bae4e11edeb4b6da22d259ef203dc" gracePeriod=30 Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.693759 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.693740508 podStartE2EDuration="4.693740508s" podCreationTimestamp="2025-12-01 07:11:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:42.690843366 +0000 UTC m=+1258.011651052" watchObservedRunningTime="2025-12-01 07:11:42.693740508 +0000 UTC m=+1258.014548194" Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.869182 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.997658 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-dns-swift-storage-0\") pod \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.997825 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-config\") pod \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.997872 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-ovsdbserver-nb\") pod \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.997933 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-ovsdbserver-sb\") pod \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.997993 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sztjs\" (UniqueName: \"kubernetes.io/projected/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-kube-api-access-sztjs\") pod \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " Dec 01 07:11:42 crc kubenswrapper[4822]: I1201 07:11:42.998030 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-dns-svc\") pod \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\" (UID: \"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a\") " Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.003788 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-kube-api-access-sztjs" (OuterVolumeSpecName: "kube-api-access-sztjs") pod "a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" (UID: "a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a"). InnerVolumeSpecName "kube-api-access-sztjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.049813 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" (UID: "a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.050406 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" (UID: "a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.064668 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" (UID: "a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.068622 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" (UID: "a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.071033 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-config" (OuterVolumeSpecName: "config") pod "a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" (UID: "a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.099961 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.099999 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sztjs\" (UniqueName: \"kubernetes.io/projected/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-kube-api-access-sztjs\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.100013 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.100024 4822 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.100034 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.100043 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.314147 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.396231 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-64649776b-r955c"] Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.396453 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-64649776b-r955c" podUID="082bea40-1a34-4711-8c74-dbf325eb5658" containerName="neutron-api" 
containerID="cri-o://5b2b1e1cb647203da9ad9d042fde915571a9f033d00ba41481a665be92106ea1" gracePeriod=30 Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.396603 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-64649776b-r955c" podUID="082bea40-1a34-4711-8c74-dbf325eb5658" containerName="neutron-httpd" containerID="cri-o://6ffe94d73e5f581649887769ef4860b7e9405ae002148fd3a8016e28d98b728c" gracePeriod=30 Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.686917 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" event={"ID":"a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a","Type":"ContainerDied","Data":"361ee547f2583050c1858170ea07374aebe48411d4f632dd18f2225388d6c223"} Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.687287 4822 scope.go:117] "RemoveContainer" containerID="6a6f8600e82ab47468d8adcd7d669f007b4eab85a3625dca88b611e3c0d5e275" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.687409 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f677dd449-mnlmw" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.729935 4822 generic.go:334] "Generic (PLEG): container finished" podID="14297bb6-88ad-438f-afb1-af681311e3b5" containerID="d8ae9f3f72c10090b3836cca4c19f9fbb08bae4e11edeb4b6da22d259ef203dc" exitCode=0 Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.730041 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"14297bb6-88ad-438f-afb1-af681311e3b5","Type":"ContainerDied","Data":"d8ae9f3f72c10090b3836cca4c19f9fbb08bae4e11edeb4b6da22d259ef203dc"} Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.751092 4822 generic.go:334] "Generic (PLEG): container finished" podID="082bea40-1a34-4711-8c74-dbf325eb5658" containerID="6ffe94d73e5f581649887769ef4860b7e9405ae002148fd3a8016e28d98b728c" exitCode=0 Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.751355 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64649776b-r955c" event={"ID":"082bea40-1a34-4711-8c74-dbf325eb5658","Type":"ContainerDied","Data":"6ffe94d73e5f581649887769ef4860b7e9405ae002148fd3a8016e28d98b728c"} Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.752687 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f677dd449-mnlmw"] Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.753886 4822 scope.go:117] "RemoveContainer" containerID="90696d8805c61145f07681a10dc909b6d868a96efeec4eb77b7e907f6b2d2f6b" Dec 01 07:11:43 crc kubenswrapper[4822]: I1201 07:11:43.785426 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f677dd449-mnlmw"] Dec 01 07:11:44 crc kubenswrapper[4822]: I1201 07:11:44.052204 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:44 crc kubenswrapper[4822]: I1201 07:11:44.968675 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" path="/var/lib/kubelet/pods/a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a/volumes" Dec 01 07:11:45 crc kubenswrapper[4822]: I1201 07:11:45.468272 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:11:45 crc kubenswrapper[4822]: I1201 07:11:45.526130 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7649fdcd84-lj592"] Dec 01 
07:11:45 crc kubenswrapper[4822]: I1201 07:11:45.526349 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7649fdcd84-lj592" podUID="f5fd8bbb-0020-435d-8934-47840b591495" containerName="barbican-api-log" containerID="cri-o://fa1a42c217e8af5cb3cf84551074fc2dad34352132042251e52f7f02cce1197f" gracePeriod=30 Dec 01 07:11:45 crc kubenswrapper[4822]: I1201 07:11:45.526450 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7649fdcd84-lj592" podUID="f5fd8bbb-0020-435d-8934-47840b591495" containerName="barbican-api" containerID="cri-o://6056a5b46da462ad08ba967cfabd58573a3cf9478d8c00cc9963f0ee5d8aad8e" gracePeriod=30 Dec 01 07:11:45 crc kubenswrapper[4822]: I1201 07:11:45.780800 4822 generic.go:334] "Generic (PLEG): container finished" podID="082bea40-1a34-4711-8c74-dbf325eb5658" containerID="5b2b1e1cb647203da9ad9d042fde915571a9f033d00ba41481a665be92106ea1" exitCode=0 Dec 01 07:11:45 crc kubenswrapper[4822]: I1201 07:11:45.781020 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64649776b-r955c" event={"ID":"082bea40-1a34-4711-8c74-dbf325eb5658","Type":"ContainerDied","Data":"5b2b1e1cb647203da9ad9d042fde915571a9f033d00ba41481a665be92106ea1"} Dec 01 07:11:45 crc kubenswrapper[4822]: I1201 07:11:45.784023 4822 generic.go:334] "Generic (PLEG): container finished" podID="f5fd8bbb-0020-435d-8934-47840b591495" containerID="fa1a42c217e8af5cb3cf84551074fc2dad34352132042251e52f7f02cce1197f" exitCode=143 Dec 01 07:11:45 crc kubenswrapper[4822]: I1201 07:11:45.784056 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7649fdcd84-lj592" event={"ID":"f5fd8bbb-0020-435d-8934-47840b591495","Type":"ContainerDied","Data":"fa1a42c217e8af5cb3cf84551074fc2dad34352132042251e52f7f02cce1197f"} Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.199244 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.361951 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-httpd-config\") pod \"082bea40-1a34-4711-8c74-dbf325eb5658\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.362288 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6skdb\" (UniqueName: \"kubernetes.io/projected/082bea40-1a34-4711-8c74-dbf325eb5658-kube-api-access-6skdb\") pod \"082bea40-1a34-4711-8c74-dbf325eb5658\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.362330 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-ovndb-tls-certs\") pod \"082bea40-1a34-4711-8c74-dbf325eb5658\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.362382 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-combined-ca-bundle\") pod \"082bea40-1a34-4711-8c74-dbf325eb5658\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.362446 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-config\") pod \"082bea40-1a34-4711-8c74-dbf325eb5658\" (UID: \"082bea40-1a34-4711-8c74-dbf325eb5658\") " Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.379785 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "082bea40-1a34-4711-8c74-dbf325eb5658" (UID: "082bea40-1a34-4711-8c74-dbf325eb5658"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.379840 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/082bea40-1a34-4711-8c74-dbf325eb5658-kube-api-access-6skdb" (OuterVolumeSpecName: "kube-api-access-6skdb") pod "082bea40-1a34-4711-8c74-dbf325eb5658" (UID: "082bea40-1a34-4711-8c74-dbf325eb5658"). InnerVolumeSpecName "kube-api-access-6skdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.416605 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-config" (OuterVolumeSpecName: "config") pod "082bea40-1a34-4711-8c74-dbf325eb5658" (UID: "082bea40-1a34-4711-8c74-dbf325eb5658"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.418841 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "082bea40-1a34-4711-8c74-dbf325eb5658" (UID: "082bea40-1a34-4711-8c74-dbf325eb5658"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.454493 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "082bea40-1a34-4711-8c74-dbf325eb5658" (UID: "082bea40-1a34-4711-8c74-dbf325eb5658"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.472117 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.472170 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.472187 4822 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.472200 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6skdb\" (UniqueName: \"kubernetes.io/projected/082bea40-1a34-4711-8c74-dbf325eb5658-kube-api-access-6skdb\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.472215 4822 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/082bea40-1a34-4711-8c74-dbf325eb5658-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.804919 4822 generic.go:334] "Generic (PLEG): container finished" podID="14297bb6-88ad-438f-afb1-af681311e3b5" containerID="ffa9069e31e7e4f177836e190599b849cde3830e468dafa8ed7d62114db3aae7" exitCode=0 Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.804987 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"14297bb6-88ad-438f-afb1-af681311e3b5","Type":"ContainerDied","Data":"ffa9069e31e7e4f177836e190599b849cde3830e468dafa8ed7d62114db3aae7"} Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.805390 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"14297bb6-88ad-438f-afb1-af681311e3b5","Type":"ContainerDied","Data":"dc39dee31744b3f291bfcfe5f2f41455d3405b7d2709cea7f5aa60a1a8c561ec"} Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.805418 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc39dee31744b3f291bfcfe5f2f41455d3405b7d2709cea7f5aa60a1a8c561ec" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.812653 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64649776b-r955c" event={"ID":"082bea40-1a34-4711-8c74-dbf325eb5658","Type":"ContainerDied","Data":"2bed2e52b55108ed9d3499e9e71838fd0b60c87cd3c1b8ef8610a8da02e1029d"} Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.812722 4822 scope.go:117] "RemoveContainer" containerID="6ffe94d73e5f581649887769ef4860b7e9405ae002148fd3a8016e28d98b728c" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.812888 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-64649776b-r955c" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.871663 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.885071 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-64649776b-r955c"] Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.893767 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-64649776b-r955c"] Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.895184 4822 scope.go:117] "RemoveContainer" containerID="5b2b1e1cb647203da9ad9d042fde915571a9f033d00ba41481a665be92106ea1" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.966405 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="082bea40-1a34-4711-8c74-dbf325eb5658" path="/var/lib/kubelet/pods/082bea40-1a34-4711-8c74-dbf325eb5658/volumes" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.987334 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-config-data-custom\") pod \"14297bb6-88ad-438f-afb1-af681311e3b5\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.987573 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-combined-ca-bundle\") pod \"14297bb6-88ad-438f-afb1-af681311e3b5\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.987614 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/14297bb6-88ad-438f-afb1-af681311e3b5-etc-machine-id\") pod \"14297bb6-88ad-438f-afb1-af681311e3b5\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.987641 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-config-data\") pod \"14297bb6-88ad-438f-afb1-af681311e3b5\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.987670 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6h2sv\" (UniqueName: \"kubernetes.io/projected/14297bb6-88ad-438f-afb1-af681311e3b5-kube-api-access-6h2sv\") pod \"14297bb6-88ad-438f-afb1-af681311e3b5\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.987739 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-scripts\") pod \"14297bb6-88ad-438f-afb1-af681311e3b5\" (UID: \"14297bb6-88ad-438f-afb1-af681311e3b5\") " Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.987998 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/14297bb6-88ad-438f-afb1-af681311e3b5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "14297bb6-88ad-438f-afb1-af681311e3b5" (UID: "14297bb6-88ad-438f-afb1-af681311e3b5"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.988606 4822 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/14297bb6-88ad-438f-afb1-af681311e3b5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.992654 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-scripts" (OuterVolumeSpecName: "scripts") pod "14297bb6-88ad-438f-afb1-af681311e3b5" (UID: "14297bb6-88ad-438f-afb1-af681311e3b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.993442 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14297bb6-88ad-438f-afb1-af681311e3b5-kube-api-access-6h2sv" (OuterVolumeSpecName: "kube-api-access-6h2sv") pod "14297bb6-88ad-438f-afb1-af681311e3b5" (UID: "14297bb6-88ad-438f-afb1-af681311e3b5"). InnerVolumeSpecName "kube-api-access-6h2sv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:46 crc kubenswrapper[4822]: I1201 07:11:46.995585 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "14297bb6-88ad-438f-afb1-af681311e3b5" (UID: "14297bb6-88ad-438f-afb1-af681311e3b5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.053466 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14297bb6-88ad-438f-afb1-af681311e3b5" (UID: "14297bb6-88ad-438f-afb1-af681311e3b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.092344 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.092386 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6h2sv\" (UniqueName: \"kubernetes.io/projected/14297bb6-88ad-438f-afb1-af681311e3b5-kube-api-access-6h2sv\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.092402 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.092414 4822 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.113401 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-config-data" (OuterVolumeSpecName: "config-data") pod "14297bb6-88ad-438f-afb1-af681311e3b5" (UID: "14297bb6-88ad-438f-afb1-af681311e3b5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.194219 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14297bb6-88ad-438f-afb1-af681311e3b5-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.826049 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.866208 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.874148 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.891770 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 07:11:47 crc kubenswrapper[4822]: E1201 07:11:47.892202 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" containerName="dnsmasq-dns" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.892226 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" containerName="dnsmasq-dns" Dec 01 07:11:47 crc kubenswrapper[4822]: E1201 07:11:47.892242 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" containerName="init" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.892250 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" containerName="init" Dec 01 07:11:47 crc kubenswrapper[4822]: E1201 07:11:47.892269 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="082bea40-1a34-4711-8c74-dbf325eb5658" containerName="neutron-api" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.892280 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="082bea40-1a34-4711-8c74-dbf325eb5658" containerName="neutron-api" Dec 01 07:11:47 crc kubenswrapper[4822]: E1201 07:11:47.892295 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14297bb6-88ad-438f-afb1-af681311e3b5" containerName="cinder-scheduler" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.892302 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="14297bb6-88ad-438f-afb1-af681311e3b5" containerName="cinder-scheduler" Dec 01 07:11:47 crc kubenswrapper[4822]: E1201 07:11:47.892326 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="082bea40-1a34-4711-8c74-dbf325eb5658" containerName="neutron-httpd" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.892334 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="082bea40-1a34-4711-8c74-dbf325eb5658" containerName="neutron-httpd" Dec 01 07:11:47 crc kubenswrapper[4822]: E1201 07:11:47.892353 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14297bb6-88ad-438f-afb1-af681311e3b5" containerName="probe" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.892360 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="14297bb6-88ad-438f-afb1-af681311e3b5" containerName="probe" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.892649 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="14297bb6-88ad-438f-afb1-af681311e3b5" containerName="cinder-scheduler" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.892666 4822 
memory_manager.go:354] "RemoveStaleState removing state" podUID="082bea40-1a34-4711-8c74-dbf325eb5658" containerName="neutron-httpd" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.892693 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="082bea40-1a34-4711-8c74-dbf325eb5658" containerName="neutron-api" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.892707 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1b1dfd0-f21e-4d4a-8646-0ba6b168e24a" containerName="dnsmasq-dns" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.892719 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="14297bb6-88ad-438f-afb1-af681311e3b5" containerName="probe" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.894017 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.898731 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 01 07:11:47 crc kubenswrapper[4822]: I1201 07:11:47.912383 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.007706 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-config-data\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.007776 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.007807 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-scripts\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.007862 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.008245 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmrgv\" (UniqueName: \"kubernetes.io/projected/946e1406-8dc2-445d-9d5a-f801260d554b-kube-api-access-vmrgv\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.008343 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/946e1406-8dc2-445d-9d5a-f801260d554b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 
01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.110166 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmrgv\" (UniqueName: \"kubernetes.io/projected/946e1406-8dc2-445d-9d5a-f801260d554b-kube-api-access-vmrgv\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.110720 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/946e1406-8dc2-445d-9d5a-f801260d554b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.110811 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/946e1406-8dc2-445d-9d5a-f801260d554b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.110864 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-config-data\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.111761 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.111811 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-scripts\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.111930 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.119830 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.119994 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-scripts\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.120132 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-combined-ca-bundle\") pod 
\"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.120793 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-config-data\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.134343 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmrgv\" (UniqueName: \"kubernetes.io/projected/946e1406-8dc2-445d-9d5a-f801260d554b-kube-api-access-vmrgv\") pod \"cinder-scheduler-0\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.214507 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.684641 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.722340 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7649fdcd84-lj592" podUID="f5fd8bbb-0020-435d-8934-47840b591495" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": read tcp 10.217.0.2:58190->10.217.0.158:9311: read: connection reset by peer" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.722708 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7649fdcd84-lj592" podUID="f5fd8bbb-0020-435d-8934-47840b591495" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": read tcp 10.217.0.2:58194->10.217.0.158:9311: read: connection reset by peer" Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.840188 4822 generic.go:334] "Generic (PLEG): container finished" podID="f5fd8bbb-0020-435d-8934-47840b591495" containerID="6056a5b46da462ad08ba967cfabd58573a3cf9478d8c00cc9963f0ee5d8aad8e" exitCode=0 Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.840289 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7649fdcd84-lj592" event={"ID":"f5fd8bbb-0020-435d-8934-47840b591495","Type":"ContainerDied","Data":"6056a5b46da462ad08ba967cfabd58573a3cf9478d8c00cc9963f0ee5d8aad8e"} Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.841969 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"946e1406-8dc2-445d-9d5a-f801260d554b","Type":"ContainerStarted","Data":"c6e2045f220f67ded2ef2526eb5154e5827a7a90202dabf76ff92719b4d10d5f"} Dec 01 07:11:48 crc kubenswrapper[4822]: I1201 07:11:48.967670 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14297bb6-88ad-438f-afb1-af681311e3b5" path="/var/lib/kubelet/pods/14297bb6-88ad-438f-afb1-af681311e3b5/volumes" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.162526 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.338319 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-992xp\" (UniqueName: \"kubernetes.io/projected/f5fd8bbb-0020-435d-8934-47840b591495-kube-api-access-992xp\") pod \"f5fd8bbb-0020-435d-8934-47840b591495\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.338705 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-config-data\") pod \"f5fd8bbb-0020-435d-8934-47840b591495\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.338867 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-combined-ca-bundle\") pod \"f5fd8bbb-0020-435d-8934-47840b591495\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.338935 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-config-data-custom\") pod \"f5fd8bbb-0020-435d-8934-47840b591495\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.338960 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5fd8bbb-0020-435d-8934-47840b591495-logs\") pod \"f5fd8bbb-0020-435d-8934-47840b591495\" (UID: \"f5fd8bbb-0020-435d-8934-47840b591495\") " Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.339776 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5fd8bbb-0020-435d-8934-47840b591495-logs" (OuterVolumeSpecName: "logs") pod "f5fd8bbb-0020-435d-8934-47840b591495" (UID: "f5fd8bbb-0020-435d-8934-47840b591495"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.342292 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5fd8bbb-0020-435d-8934-47840b591495-kube-api-access-992xp" (OuterVolumeSpecName: "kube-api-access-992xp") pod "f5fd8bbb-0020-435d-8934-47840b591495" (UID: "f5fd8bbb-0020-435d-8934-47840b591495"). InnerVolumeSpecName "kube-api-access-992xp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.344017 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f5fd8bbb-0020-435d-8934-47840b591495" (UID: "f5fd8bbb-0020-435d-8934-47840b591495"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.366936 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5fd8bbb-0020-435d-8934-47840b591495" (UID: "f5fd8bbb-0020-435d-8934-47840b591495"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.388748 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-config-data" (OuterVolumeSpecName: "config-data") pod "f5fd8bbb-0020-435d-8934-47840b591495" (UID: "f5fd8bbb-0020-435d-8934-47840b591495"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.441162 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.441204 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5fd8bbb-0020-435d-8934-47840b591495-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.441218 4822 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.441229 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-992xp\" (UniqueName: \"kubernetes.io/projected/f5fd8bbb-0020-435d-8934-47840b591495-kube-api-access-992xp\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.441245 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5fd8bbb-0020-435d-8934-47840b591495-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.855772 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7649fdcd84-lj592" event={"ID":"f5fd8bbb-0020-435d-8934-47840b591495","Type":"ContainerDied","Data":"fc04a4885f1ccb942a86ecd673f0ba73ec8d958a53c4a6877960a1989d472940"} Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.855822 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7649fdcd84-lj592" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.855848 4822 scope.go:117] "RemoveContainer" containerID="6056a5b46da462ad08ba967cfabd58573a3cf9478d8c00cc9963f0ee5d8aad8e" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.863429 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"946e1406-8dc2-445d-9d5a-f801260d554b","Type":"ContainerStarted","Data":"adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060"} Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.895130 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7649fdcd84-lj592"] Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.900906 4822 scope.go:117] "RemoveContainer" containerID="fa1a42c217e8af5cb3cf84551074fc2dad34352132042251e52f7f02cce1197f" Dec 01 07:11:49 crc kubenswrapper[4822]: I1201 07:11:49.901472 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7649fdcd84-lj592"] Dec 01 07:11:50 crc kubenswrapper[4822]: I1201 07:11:50.828428 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 01 07:11:50 crc kubenswrapper[4822]: I1201 07:11:50.890800 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"946e1406-8dc2-445d-9d5a-f801260d554b","Type":"ContainerStarted","Data":"619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd"} Dec 01 07:11:50 crc kubenswrapper[4822]: I1201 07:11:50.925776 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.925739857 podStartE2EDuration="3.925739857s" podCreationTimestamp="2025-12-01 07:11:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:11:50.917772613 +0000 UTC m=+1266.238580319" watchObservedRunningTime="2025-12-01 07:11:50.925739857 +0000 UTC m=+1266.246547563" Dec 01 07:11:50 crc kubenswrapper[4822]: I1201 07:11:50.980833 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5fd8bbb-0020-435d-8934-47840b591495" path="/var/lib/kubelet/pods/f5fd8bbb-0020-435d-8934-47840b591495/volumes" Dec 01 07:11:53 crc kubenswrapper[4822]: I1201 07:11:53.215333 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 01 07:11:53 crc kubenswrapper[4822]: I1201 07:11:53.450067 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:53 crc kubenswrapper[4822]: I1201 07:11:53.450123 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:11:53 crc kubenswrapper[4822]: I1201 07:11:53.877065 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.297937 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 01 07:11:57 crc kubenswrapper[4822]: E1201 07:11:57.299767 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5fd8bbb-0020-435d-8934-47840b591495" containerName="barbican-api" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.299796 4822 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f5fd8bbb-0020-435d-8934-47840b591495" containerName="barbican-api" Dec 01 07:11:57 crc kubenswrapper[4822]: E1201 07:11:57.299884 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5fd8bbb-0020-435d-8934-47840b591495" containerName="barbican-api-log" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.299896 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5fd8bbb-0020-435d-8934-47840b591495" containerName="barbican-api-log" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.300190 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5fd8bbb-0020-435d-8934-47840b591495" containerName="barbican-api" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.300235 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5fd8bbb-0020-435d-8934-47840b591495" containerName="barbican-api-log" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.303110 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.305734 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-qkp46" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.306324 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.307436 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.317463 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.343478 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-openstack-config-secret\") pod \"openstackclient\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.343659 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-openstack-config\") pod \"openstackclient\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.343716 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-combined-ca-bundle\") pod \"openstackclient\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.343829 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdsp4\" (UniqueName: \"kubernetes.io/projected/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-kube-api-access-wdsp4\") pod \"openstackclient\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.445260 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdsp4\" (UniqueName: 
\"kubernetes.io/projected/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-kube-api-access-wdsp4\") pod \"openstackclient\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.445372 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-openstack-config-secret\") pod \"openstackclient\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.445467 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-openstack-config\") pod \"openstackclient\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.445500 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-combined-ca-bundle\") pod \"openstackclient\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.447495 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-openstack-config\") pod \"openstackclient\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.456328 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-openstack-config-secret\") pod \"openstackclient\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.456435 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-combined-ca-bundle\") pod \"openstackclient\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.468194 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdsp4\" (UniqueName: \"kubernetes.io/projected/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-kube-api-access-wdsp4\") pod \"openstackclient\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " pod="openstack/openstackclient" Dec 01 07:11:57 crc kubenswrapper[4822]: I1201 07:11:57.666999 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 01 07:11:58 crc kubenswrapper[4822]: I1201 07:11:58.204045 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 01 07:11:58 crc kubenswrapper[4822]: I1201 07:11:58.574165 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 01 07:11:58 crc kubenswrapper[4822]: I1201 07:11:58.991709 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4","Type":"ContainerStarted","Data":"49b555198ad27730766ff64e30800943be74d0e29811a4e5ec1ecad39afbec1d"} Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.618929 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-685677fbf5-d4ncs"] Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.621308 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.631416 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.631502 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.642988 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-685677fbf5-d4ncs"] Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.648514 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.728799 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-combined-ca-bundle\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.728885 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bjnx\" (UniqueName: \"kubernetes.io/projected/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-kube-api-access-7bjnx\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.728942 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-etc-swift\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.728969 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-config-data\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.729031 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-internal-tls-certs\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.729068 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-run-httpd\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.729163 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-public-tls-certs\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.729209 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-log-httpd\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.831639 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-log-httpd\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.831719 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-combined-ca-bundle\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.831810 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bjnx\" (UniqueName: \"kubernetes.io/projected/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-kube-api-access-7bjnx\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.832312 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-log-httpd\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.832853 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-etc-swift\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.832952 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-config-data\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.833122 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-internal-tls-certs\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.833162 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-run-httpd\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.833323 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-public-tls-certs\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.833583 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-run-httpd\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.853836 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-public-tls-certs\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.854349 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-internal-tls-certs\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.854409 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-etc-swift\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.854424 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-config-data\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.856383 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-combined-ca-bundle\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: 
\"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.857187 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bjnx\" (UniqueName: \"kubernetes.io/projected/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-kube-api-access-7bjnx\") pod \"swift-proxy-685677fbf5-d4ncs\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:01 crc kubenswrapper[4822]: I1201 07:12:01.946163 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:02 crc kubenswrapper[4822]: I1201 07:12:02.640067 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-685677fbf5-d4ncs"] Dec 01 07:12:02 crc kubenswrapper[4822]: I1201 07:12:02.747735 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 01 07:12:04 crc kubenswrapper[4822]: I1201 07:12:04.786672 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:04 crc kubenswrapper[4822]: I1201 07:12:04.787230 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="ceilometer-central-agent" containerID="cri-o://59c79f57c551b38dcff370abaa16d1ece43bee9e5687047a0bb2960aa500c9bd" gracePeriod=30 Dec 01 07:12:04 crc kubenswrapper[4822]: I1201 07:12:04.787754 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="proxy-httpd" containerID="cri-o://abe19f43b5349af7f94c8bf8c6cfb7cce95116ab44832bc5a412b0133ddf61b1" gracePeriod=30 Dec 01 07:12:04 crc kubenswrapper[4822]: I1201 07:12:04.787799 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="sg-core" containerID="cri-o://4fc9d1e1036db88f60a278580b3c351e88f7e3893ae0bdbdd4d80ffdae35a520" gracePeriod=30 Dec 01 07:12:04 crc kubenswrapper[4822]: I1201 07:12:04.787829 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="ceilometer-notification-agent" containerID="cri-o://c34156c2014761490e0d1d9d3e71e9381093b77f6e26e24ad7138af2f868ad62" gracePeriod=30 Dec 01 07:12:05 crc kubenswrapper[4822]: I1201 07:12:05.049971 4822 generic.go:334] "Generic (PLEG): container finished" podID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerID="abe19f43b5349af7f94c8bf8c6cfb7cce95116ab44832bc5a412b0133ddf61b1" exitCode=0 Dec 01 07:12:05 crc kubenswrapper[4822]: I1201 07:12:05.050747 4822 generic.go:334] "Generic (PLEG): container finished" podID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerID="4fc9d1e1036db88f60a278580b3c351e88f7e3893ae0bdbdd4d80ffdae35a520" exitCode=2 Dec 01 07:12:05 crc kubenswrapper[4822]: I1201 07:12:05.050051 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862","Type":"ContainerDied","Data":"abe19f43b5349af7f94c8bf8c6cfb7cce95116ab44832bc5a412b0133ddf61b1"} Dec 01 07:12:05 crc kubenswrapper[4822]: I1201 07:12:05.050923 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862","Type":"ContainerDied","Data":"4fc9d1e1036db88f60a278580b3c351e88f7e3893ae0bdbdd4d80ffdae35a520"} Dec 01 07:12:06 crc kubenswrapper[4822]: I1201 07:12:06.065687 4822 generic.go:334] "Generic (PLEG): container finished" podID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerID="59c79f57c551b38dcff370abaa16d1ece43bee9e5687047a0bb2960aa500c9bd" exitCode=0 Dec 01 07:12:06 crc kubenswrapper[4822]: I1201 07:12:06.065728 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862","Type":"ContainerDied","Data":"59c79f57c551b38dcff370abaa16d1ece43bee9e5687047a0bb2960aa500c9bd"} Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.079402 4822 generic.go:334] "Generic (PLEG): container finished" podID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerID="c34156c2014761490e0d1d9d3e71e9381093b77f6e26e24ad7138af2f868ad62" exitCode=0 Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.079471 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862","Type":"ContainerDied","Data":"c34156c2014761490e0d1d9d3e71e9381093b77f6e26e24ad7138af2f868ad62"} Dec 01 07:12:07 crc kubenswrapper[4822]: W1201 07:12:07.429492 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f721b33_b6df_4e86_90bc_52a7d0c49cbf.slice/crio-36e01835fa026f3ef7ff9590d2d93686fbe84870121b26ee7c609a74ed071510 WatchSource:0}: Error finding container 36e01835fa026f3ef7ff9590d2d93686fbe84870121b26ee7c609a74ed071510: Status 404 returned error can't find the container with id 36e01835fa026f3ef7ff9590d2d93686fbe84870121b26ee7c609a74ed071510 Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.747137 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.790648 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-log-httpd\") pod \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.790716 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7zgt\" (UniqueName: \"kubernetes.io/projected/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-kube-api-access-l7zgt\") pod \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.790763 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-scripts\") pod \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.790789 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-combined-ca-bundle\") pod \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.790809 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-run-httpd\") pod \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.790829 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-sg-core-conf-yaml\") pod \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.790847 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-config-data\") pod \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\" (UID: \"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862\") " Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.794646 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" (UID: "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.795010 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-scripts" (OuterVolumeSpecName: "scripts") pod "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" (UID: "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.795406 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" (UID: "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.798775 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-kube-api-access-l7zgt" (OuterVolumeSpecName: "kube-api-access-l7zgt") pod "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" (UID: "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862"). InnerVolumeSpecName "kube-api-access-l7zgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.829338 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" (UID: "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.888796 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" (UID: "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.892991 4822 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.893039 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7zgt\" (UniqueName: \"kubernetes.io/projected/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-kube-api-access-l7zgt\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.893056 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.893068 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.893080 4822 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.893091 4822 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.925671 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-config-data" (OuterVolumeSpecName: "config-data") pod "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" (UID: "e2db9ac4-2fdc-414c-ac4a-9b9c187ed862"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:07 crc kubenswrapper[4822]: I1201 07:12:07.994901 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.089915 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-685677fbf5-d4ncs" event={"ID":"7f721b33-b6df-4e86-90bc-52a7d0c49cbf","Type":"ContainerStarted","Data":"58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60"} Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.089956 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-685677fbf5-d4ncs" event={"ID":"7f721b33-b6df-4e86-90bc-52a7d0c49cbf","Type":"ContainerStarted","Data":"34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0"} Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.089968 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-685677fbf5-d4ncs" event={"ID":"7f721b33-b6df-4e86-90bc-52a7d0c49cbf","Type":"ContainerStarted","Data":"36e01835fa026f3ef7ff9590d2d93686fbe84870121b26ee7c609a74ed071510"} Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.090383 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.090500 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.092396 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4","Type":"ContainerStarted","Data":"e048e44ad53b2aa10bcb223a22d1436e3fc429ff83707e605d3e5616b88398a4"} Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.095433 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e2db9ac4-2fdc-414c-ac4a-9b9c187ed862","Type":"ContainerDied","Data":"084bfc44e6b7192a948ad47ac5b427bc5fab3a8b0ce1e8189dc3faa90c024320"} Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.095475 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.095489 4822 scope.go:117] "RemoveContainer" containerID="abe19f43b5349af7f94c8bf8c6cfb7cce95116ab44832bc5a412b0133ddf61b1" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.115391 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-685677fbf5-d4ncs" podStartSLOduration=7.115373334 podStartE2EDuration="7.115373334s" podCreationTimestamp="2025-12-01 07:12:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:12:08.10705219 +0000 UTC m=+1283.427859876" watchObservedRunningTime="2025-12-01 07:12:08.115373334 +0000 UTC m=+1283.436181020" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.129421 4822 scope.go:117] "RemoveContainer" containerID="4fc9d1e1036db88f60a278580b3c351e88f7e3893ae0bdbdd4d80ffdae35a520" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.133799 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.868658038 podStartE2EDuration="11.133785092s" podCreationTimestamp="2025-12-01 07:11:57 +0000 UTC" firstStartedPulling="2025-12-01 07:11:58.220707035 +0000 UTC m=+1273.541514721" lastFinishedPulling="2025-12-01 07:12:07.485834089 +0000 UTC m=+1282.806641775" observedRunningTime="2025-12-01 07:12:08.132420774 +0000 UTC m=+1283.453228460" watchObservedRunningTime="2025-12-01 07:12:08.133785092 +0000 UTC m=+1283.454592778" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.157424 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.164608 4822 scope.go:117] "RemoveContainer" containerID="c34156c2014761490e0d1d9d3e71e9381093b77f6e26e24ad7138af2f868ad62" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.177613 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.194164 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:08 crc kubenswrapper[4822]: E1201 07:12:08.194980 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="ceilometer-central-agent" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.195075 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="ceilometer-central-agent" Dec 01 07:12:08 crc kubenswrapper[4822]: E1201 07:12:08.195181 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="sg-core" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.195267 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="sg-core" Dec 01 07:12:08 crc kubenswrapper[4822]: E1201 07:12:08.195345 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="ceilometer-notification-agent" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.195429 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="ceilometer-notification-agent" Dec 01 07:12:08 crc kubenswrapper[4822]: E1201 07:12:08.195511 4822 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="proxy-httpd" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.195611 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="proxy-httpd" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.195914 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="sg-core" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.195997 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="proxy-httpd" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.196077 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="ceilometer-notification-agent" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.196159 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" containerName="ceilometer-central-agent" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.205115 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.205422 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.207511 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.209169 4822 scope.go:117] "RemoveContainer" containerID="59c79f57c551b38dcff370abaa16d1ece43bee9e5687047a0bb2960aa500c9bd" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.210278 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.311276 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-scripts\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.311482 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4mwl\" (UniqueName: \"kubernetes.io/projected/c4bac5d5-04f6-4886-9010-0cbf4110363c-kube-api-access-h4mwl\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.311801 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4bac5d5-04f6-4886-9010-0cbf4110363c-run-httpd\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.311943 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.311975 4822 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-config-data\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.312017 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.312040 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4bac5d5-04f6-4886-9010-0cbf4110363c-log-httpd\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.328278 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:08 crc kubenswrapper[4822]: E1201 07:12:08.329180 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-h4mwl log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="c4bac5d5-04f6-4886-9010-0cbf4110363c" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.413608 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.413654 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-config-data\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.413674 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.413690 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4bac5d5-04f6-4886-9010-0cbf4110363c-log-httpd\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.413760 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-scripts\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.413848 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4mwl\" (UniqueName: 
\"kubernetes.io/projected/c4bac5d5-04f6-4886-9010-0cbf4110363c-kube-api-access-h4mwl\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.414359 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4bac5d5-04f6-4886-9010-0cbf4110363c-log-httpd\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.414511 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4bac5d5-04f6-4886-9010-0cbf4110363c-run-httpd\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.414775 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4bac5d5-04f6-4886-9010-0cbf4110363c-run-httpd\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.417847 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.418675 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-scripts\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.418682 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-config-data\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.418845 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.443693 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4mwl\" (UniqueName: \"kubernetes.io/projected/c4bac5d5-04f6-4886-9010-0cbf4110363c-kube-api-access-h4mwl\") pod \"ceilometer-0\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " pod="openstack/ceilometer-0" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.961658 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2db9ac4-2fdc-414c-ac4a-9b9c187ed862" path="/var/lib/kubelet/pods/e2db9ac4-2fdc-414c-ac4a-9b9c187ed862/volumes" Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.996515 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-g5k9q"] Dec 01 07:12:08 crc kubenswrapper[4822]: I1201 07:12:08.997965 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-g5k9q" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.019044 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-g5k9q"] Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.104530 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.114813 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.125599 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-33d4-account-create-update-ng7q9"] Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.127046 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-33d4-account-create-update-ng7q9" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.125751 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/141fe1bc-0085-48cc-a283-f8596ff7240a-operator-scripts\") pod \"nova-api-db-create-g5k9q\" (UID: \"141fe1bc-0085-48cc-a283-f8596ff7240a\") " pod="openstack/nova-api-db-create-g5k9q" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.127713 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhmdr\" (UniqueName: \"kubernetes.io/projected/141fe1bc-0085-48cc-a283-f8596ff7240a-kube-api-access-vhmdr\") pod \"nova-api-db-create-g5k9q\" (UID: \"141fe1bc-0085-48cc-a283-f8596ff7240a\") " pod="openstack/nova-api-db-create-g5k9q" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.131818 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.175645 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-33d4-account-create-update-ng7q9"] Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.241975 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-sg-core-conf-yaml\") pod \"c4bac5d5-04f6-4886-9010-0cbf4110363c\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.242103 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-combined-ca-bundle\") pod \"c4bac5d5-04f6-4886-9010-0cbf4110363c\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.242128 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4bac5d5-04f6-4886-9010-0cbf4110363c-run-httpd\") pod \"c4bac5d5-04f6-4886-9010-0cbf4110363c\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.242259 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4bac5d5-04f6-4886-9010-0cbf4110363c-log-httpd\") pod \"c4bac5d5-04f6-4886-9010-0cbf4110363c\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 
07:12:09.242304 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4mwl\" (UniqueName: \"kubernetes.io/projected/c4bac5d5-04f6-4886-9010-0cbf4110363c-kube-api-access-h4mwl\") pod \"c4bac5d5-04f6-4886-9010-0cbf4110363c\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.242361 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-config-data\") pod \"c4bac5d5-04f6-4886-9010-0cbf4110363c\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.242412 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-scripts\") pod \"c4bac5d5-04f6-4886-9010-0cbf4110363c\" (UID: \"c4bac5d5-04f6-4886-9010-0cbf4110363c\") " Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.244151 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4bac5d5-04f6-4886-9010-0cbf4110363c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c4bac5d5-04f6-4886-9010-0cbf4110363c" (UID: "c4bac5d5-04f6-4886-9010-0cbf4110363c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.244270 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/141fe1bc-0085-48cc-a283-f8596ff7240a-operator-scripts\") pod \"nova-api-db-create-g5k9q\" (UID: \"141fe1bc-0085-48cc-a283-f8596ff7240a\") " pod="openstack/nova-api-db-create-g5k9q" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.244401 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhmdr\" (UniqueName: \"kubernetes.io/projected/141fe1bc-0085-48cc-a283-f8596ff7240a-kube-api-access-vhmdr\") pod \"nova-api-db-create-g5k9q\" (UID: \"141fe1bc-0085-48cc-a283-f8596ff7240a\") " pod="openstack/nova-api-db-create-g5k9q" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.244508 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7d41549-25ee-4695-a0fb-0db75a1d2238-operator-scripts\") pod \"nova-api-33d4-account-create-update-ng7q9\" (UID: \"b7d41549-25ee-4695-a0fb-0db75a1d2238\") " pod="openstack/nova-api-33d4-account-create-update-ng7q9" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.250914 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/141fe1bc-0085-48cc-a283-f8596ff7240a-operator-scripts\") pod \"nova-api-db-create-g5k9q\" (UID: \"141fe1bc-0085-48cc-a283-f8596ff7240a\") " pod="openstack/nova-api-db-create-g5k9q" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.251698 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c4bac5d5-04f6-4886-9010-0cbf4110363c" (UID: "c4bac5d5-04f6-4886-9010-0cbf4110363c"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.253498 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4bac5d5-04f6-4886-9010-0cbf4110363c" (UID: "c4bac5d5-04f6-4886-9010-0cbf4110363c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.253765 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4bac5d5-04f6-4886-9010-0cbf4110363c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c4bac5d5-04f6-4886-9010-0cbf4110363c" (UID: "c4bac5d5-04f6-4886-9010-0cbf4110363c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.244542 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zst2q\" (UniqueName: \"kubernetes.io/projected/b7d41549-25ee-4695-a0fb-0db75a1d2238-kube-api-access-zst2q\") pod \"nova-api-33d4-account-create-update-ng7q9\" (UID: \"b7d41549-25ee-4695-a0fb-0db75a1d2238\") " pod="openstack/nova-api-33d4-account-create-update-ng7q9" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.254413 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.254433 4822 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4bac5d5-04f6-4886-9010-0cbf4110363c-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.254457 4822 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4bac5d5-04f6-4886-9010-0cbf4110363c-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.254475 4822 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.261783 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-config-data" (OuterVolumeSpecName: "config-data") pod "c4bac5d5-04f6-4886-9010-0cbf4110363c" (UID: "c4bac5d5-04f6-4886-9010-0cbf4110363c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.289823 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-scripts" (OuterVolumeSpecName: "scripts") pod "c4bac5d5-04f6-4886-9010-0cbf4110363c" (UID: "c4bac5d5-04f6-4886-9010-0cbf4110363c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.293373 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-rsm9w"] Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.299855 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4bac5d5-04f6-4886-9010-0cbf4110363c-kube-api-access-h4mwl" (OuterVolumeSpecName: "kube-api-access-h4mwl") pod "c4bac5d5-04f6-4886-9010-0cbf4110363c" (UID: "c4bac5d5-04f6-4886-9010-0cbf4110363c"). InnerVolumeSpecName "kube-api-access-h4mwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.332797 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rsm9w" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.336965 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhmdr\" (UniqueName: \"kubernetes.io/projected/141fe1bc-0085-48cc-a283-f8596ff7240a-kube-api-access-vhmdr\") pod \"nova-api-db-create-g5k9q\" (UID: \"141fe1bc-0085-48cc-a283-f8596ff7240a\") " pod="openstack/nova-api-db-create-g5k9q" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.360213 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7d41549-25ee-4695-a0fb-0db75a1d2238-operator-scripts\") pod \"nova-api-33d4-account-create-update-ng7q9\" (UID: \"b7d41549-25ee-4695-a0fb-0db75a1d2238\") " pod="openstack/nova-api-33d4-account-create-update-ng7q9" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.360292 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zst2q\" (UniqueName: \"kubernetes.io/projected/b7d41549-25ee-4695-a0fb-0db75a1d2238-kube-api-access-zst2q\") pod \"nova-api-33d4-account-create-update-ng7q9\" (UID: \"b7d41549-25ee-4695-a0fb-0db75a1d2238\") " pod="openstack/nova-api-33d4-account-create-update-ng7q9" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.360618 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4mwl\" (UniqueName: \"kubernetes.io/projected/c4bac5d5-04f6-4886-9010-0cbf4110363c-kube-api-access-h4mwl\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.360644 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.362332 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4bac5d5-04f6-4886-9010-0cbf4110363c-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.364183 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7d41549-25ee-4695-a0fb-0db75a1d2238-operator-scripts\") pod \"nova-api-33d4-account-create-update-ng7q9\" (UID: \"b7d41549-25ee-4695-a0fb-0db75a1d2238\") " pod="openstack/nova-api-33d4-account-create-update-ng7q9" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.379432 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-rsm9w"] Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.423666 4822 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zst2q\" (UniqueName: \"kubernetes.io/projected/b7d41549-25ee-4695-a0fb-0db75a1d2238-kube-api-access-zst2q\") pod \"nova-api-33d4-account-create-update-ng7q9\" (UID: \"b7d41549-25ee-4695-a0fb-0db75a1d2238\") " pod="openstack/nova-api-33d4-account-create-update-ng7q9" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.434452 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-2v2wt"] Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.435584 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-2v2wt" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.447235 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-33d4-account-create-update-ng7q9" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.457631 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-2v2wt"] Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.466563 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95469730-4c0e-45f8-b654-26449f12da7d-operator-scripts\") pod \"nova-cell0-db-create-rsm9w\" (UID: \"95469730-4c0e-45f8-b654-26449f12da7d\") " pod="openstack/nova-cell0-db-create-rsm9w" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.466714 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6rwl\" (UniqueName: \"kubernetes.io/projected/95469730-4c0e-45f8-b654-26449f12da7d-kube-api-access-s6rwl\") pod \"nova-cell0-db-create-rsm9w\" (UID: \"95469730-4c0e-45f8-b654-26449f12da7d\") " pod="openstack/nova-cell0-db-create-rsm9w" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.513650 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-4972-account-create-update-962bd"] Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.515329 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-4972-account-create-update-962bd" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.519756 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.557619 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-4972-account-create-update-962bd"] Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.571179 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f678b4fe-2c75-49a6-a191-7e31d28eea02-operator-scripts\") pod \"nova-cell1-db-create-2v2wt\" (UID: \"f678b4fe-2c75-49a6-a191-7e31d28eea02\") " pod="openstack/nova-cell1-db-create-2v2wt" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.571271 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6rwl\" (UniqueName: \"kubernetes.io/projected/95469730-4c0e-45f8-b654-26449f12da7d-kube-api-access-s6rwl\") pod \"nova-cell0-db-create-rsm9w\" (UID: \"95469730-4c0e-45f8-b654-26449f12da7d\") " pod="openstack/nova-cell0-db-create-rsm9w" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.571304 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stjbc\" (UniqueName: \"kubernetes.io/projected/f14733ce-3da3-40ad-97d6-b9ce628590ec-kube-api-access-stjbc\") pod \"nova-cell0-4972-account-create-update-962bd\" (UID: \"f14733ce-3da3-40ad-97d6-b9ce628590ec\") " pod="openstack/nova-cell0-4972-account-create-update-962bd" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.571337 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95469730-4c0e-45f8-b654-26449f12da7d-operator-scripts\") pod \"nova-cell0-db-create-rsm9w\" (UID: \"95469730-4c0e-45f8-b654-26449f12da7d\") " pod="openstack/nova-cell0-db-create-rsm9w" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.571376 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5ls6\" (UniqueName: \"kubernetes.io/projected/f678b4fe-2c75-49a6-a191-7e31d28eea02-kube-api-access-t5ls6\") pod \"nova-cell1-db-create-2v2wt\" (UID: \"f678b4fe-2c75-49a6-a191-7e31d28eea02\") " pod="openstack/nova-cell1-db-create-2v2wt" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.571458 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f14733ce-3da3-40ad-97d6-b9ce628590ec-operator-scripts\") pod \"nova-cell0-4972-account-create-update-962bd\" (UID: \"f14733ce-3da3-40ad-97d6-b9ce628590ec\") " pod="openstack/nova-cell0-4972-account-create-update-962bd" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.572455 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95469730-4c0e-45f8-b654-26449f12da7d-operator-scripts\") pod \"nova-cell0-db-create-rsm9w\" (UID: \"95469730-4c0e-45f8-b654-26449f12da7d\") " pod="openstack/nova-cell0-db-create-rsm9w" Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.615043 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-1050-account-create-update-2khv4"] Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 
07:12:09.616807 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1050-account-create-update-2khv4"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.633194 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-g5k9q"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.635233 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.654447 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1050-account-create-update-2khv4"]
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.659447 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6rwl\" (UniqueName: \"kubernetes.io/projected/95469730-4c0e-45f8-b654-26449f12da7d-kube-api-access-s6rwl\") pod \"nova-cell0-db-create-rsm9w\" (UID: \"95469730-4c0e-45f8-b654-26449f12da7d\") " pod="openstack/nova-cell0-db-create-rsm9w"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.673476 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzdt5\" (UniqueName: \"kubernetes.io/projected/f13d0961-136f-4ad6-aa25-23c7e439847a-kube-api-access-lzdt5\") pod \"nova-cell1-1050-account-create-update-2khv4\" (UID: \"f13d0961-136f-4ad6-aa25-23c7e439847a\") " pod="openstack/nova-cell1-1050-account-create-update-2khv4"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.673542 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5ls6\" (UniqueName: \"kubernetes.io/projected/f678b4fe-2c75-49a6-a191-7e31d28eea02-kube-api-access-t5ls6\") pod \"nova-cell1-db-create-2v2wt\" (UID: \"f678b4fe-2c75-49a6-a191-7e31d28eea02\") " pod="openstack/nova-cell1-db-create-2v2wt"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.673650 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f14733ce-3da3-40ad-97d6-b9ce628590ec-operator-scripts\") pod \"nova-cell0-4972-account-create-update-962bd\" (UID: \"f14733ce-3da3-40ad-97d6-b9ce628590ec\") " pod="openstack/nova-cell0-4972-account-create-update-962bd"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.673687 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f678b4fe-2c75-49a6-a191-7e31d28eea02-operator-scripts\") pod \"nova-cell1-db-create-2v2wt\" (UID: \"f678b4fe-2c75-49a6-a191-7e31d28eea02\") " pod="openstack/nova-cell1-db-create-2v2wt"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.673721 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stjbc\" (UniqueName: \"kubernetes.io/projected/f14733ce-3da3-40ad-97d6-b9ce628590ec-kube-api-access-stjbc\") pod \"nova-cell0-4972-account-create-update-962bd\" (UID: \"f14733ce-3da3-40ad-97d6-b9ce628590ec\") " pod="openstack/nova-cell0-4972-account-create-update-962bd"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.673753 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f13d0961-136f-4ad6-aa25-23c7e439847a-operator-scripts\") pod \"nova-cell1-1050-account-create-update-2khv4\" (UID: \"f13d0961-136f-4ad6-aa25-23c7e439847a\") " pod="openstack/nova-cell1-1050-account-create-update-2khv4"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.674627 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f14733ce-3da3-40ad-97d6-b9ce628590ec-operator-scripts\") pod \"nova-cell0-4972-account-create-update-962bd\" (UID: \"f14733ce-3da3-40ad-97d6-b9ce628590ec\") " pod="openstack/nova-cell0-4972-account-create-update-962bd"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.674698 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f678b4fe-2c75-49a6-a191-7e31d28eea02-operator-scripts\") pod \"nova-cell1-db-create-2v2wt\" (UID: \"f678b4fe-2c75-49a6-a191-7e31d28eea02\") " pod="openstack/nova-cell1-db-create-2v2wt"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.723140 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stjbc\" (UniqueName: \"kubernetes.io/projected/f14733ce-3da3-40ad-97d6-b9ce628590ec-kube-api-access-stjbc\") pod \"nova-cell0-4972-account-create-update-962bd\" (UID: \"f14733ce-3da3-40ad-97d6-b9ce628590ec\") " pod="openstack/nova-cell0-4972-account-create-update-962bd"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.741189 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5ls6\" (UniqueName: \"kubernetes.io/projected/f678b4fe-2c75-49a6-a191-7e31d28eea02-kube-api-access-t5ls6\") pod \"nova-cell1-db-create-2v2wt\" (UID: \"f678b4fe-2c75-49a6-a191-7e31d28eea02\") " pod="openstack/nova-cell1-db-create-2v2wt"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.742418 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-4972-account-create-update-962bd"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.796704 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f13d0961-136f-4ad6-aa25-23c7e439847a-operator-scripts\") pod \"nova-cell1-1050-account-create-update-2khv4\" (UID: \"f13d0961-136f-4ad6-aa25-23c7e439847a\") " pod="openstack/nova-cell1-1050-account-create-update-2khv4"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.796856 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzdt5\" (UniqueName: \"kubernetes.io/projected/f13d0961-136f-4ad6-aa25-23c7e439847a-kube-api-access-lzdt5\") pod \"nova-cell1-1050-account-create-update-2khv4\" (UID: \"f13d0961-136f-4ad6-aa25-23c7e439847a\") " pod="openstack/nova-cell1-1050-account-create-update-2khv4"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.798826 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f13d0961-136f-4ad6-aa25-23c7e439847a-operator-scripts\") pod \"nova-cell1-1050-account-create-update-2khv4\" (UID: \"f13d0961-136f-4ad6-aa25-23c7e439847a\") " pod="openstack/nova-cell1-1050-account-create-update-2khv4"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.830080 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzdt5\" (UniqueName: \"kubernetes.io/projected/f13d0961-136f-4ad6-aa25-23c7e439847a-kube-api-access-lzdt5\") pod \"nova-cell1-1050-account-create-update-2khv4\" (UID: \"f13d0961-136f-4ad6-aa25-23c7e439847a\") " pod="openstack/nova-cell1-1050-account-create-update-2khv4"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.893722 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rsm9w"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.954504 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-2v2wt"
Dec 01 07:12:09 crc kubenswrapper[4822]: I1201 07:12:09.996248 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-33d4-account-create-update-ng7q9"]
Dec 01 07:12:10 crc kubenswrapper[4822]: W1201 07:12:10.012704 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7d41549_25ee_4695_a0fb_0db75a1d2238.slice/crio-a1f867b6254338506a1eee634cf2617607c4d0a6f13df17250b0ee3c46a90bf5 WatchSource:0}: Error finding container a1f867b6254338506a1eee634cf2617607c4d0a6f13df17250b0ee3c46a90bf5: Status 404 returned error can't find the container with id a1f867b6254338506a1eee634cf2617607c4d0a6f13df17250b0ee3c46a90bf5
Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.082101 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1050-account-create-update-2khv4"
Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.153800 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.155642 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-33d4-account-create-update-ng7q9" event={"ID":"b7d41549-25ee-4695-a0fb-0db75a1d2238","Type":"ContainerStarted","Data":"a1f867b6254338506a1eee634cf2617607c4d0a6f13df17250b0ee3c46a90bf5"} Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.257939 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.275945 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.286996 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.289378 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.295994 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.296350 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.297934 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.347905 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-g5k9q"] Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.412480 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-4972-account-create-update-962bd"] Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.439584 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-scripts\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.439630 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.439669 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-config-data\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.439695 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-run-httpd\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.439806 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-log-httpd\") pod 
\"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.439899 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.439974 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlqfl\" (UniqueName: \"kubernetes.io/projected/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-kube-api-access-tlqfl\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.542630 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.542716 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlqfl\" (UniqueName: \"kubernetes.io/projected/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-kube-api-access-tlqfl\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.542747 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-scripts\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.542768 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.542799 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-config-data\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.543718 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-run-httpd\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.543789 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-log-httpd\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.544160 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-log-httpd\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.544371 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-run-httpd\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.553825 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.554229 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.558873 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-config-data\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.560648 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-scripts\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.562599 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlqfl\" (UniqueName: \"kubernetes.io/projected/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-kube-api-access-tlqfl\") pod \"ceilometer-0\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.598619 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-rsm9w"] Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.606918 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-2v2wt"] Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.617810 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.817953 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1050-account-create-update-2khv4"] Dec 01 07:12:10 crc kubenswrapper[4822]: I1201 07:12:10.963265 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4bac5d5-04f6-4886-9010-0cbf4110363c" path="/var/lib/kubelet/pods/c4bac5d5-04f6-4886-9010-0cbf4110363c/volumes" Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.212239 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-2v2wt" event={"ID":"f678b4fe-2c75-49a6-a191-7e31d28eea02","Type":"ContainerStarted","Data":"dacd4a868e2652b5235b0281e3dd912a7ec2077150746b150606b21da568746e"} Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.212316 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-2v2wt" event={"ID":"f678b4fe-2c75-49a6-a191-7e31d28eea02","Type":"ContainerStarted","Data":"e5bdef3575d9e6277ff799a5d1b8722aa9c79e96910a3753b73fee7d8443bcd4"} Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.227618 4822 generic.go:334] "Generic (PLEG): container finished" podID="b7d41549-25ee-4695-a0fb-0db75a1d2238" containerID="2db919405f6a2933e172ecad1f1d73d7ffb6d65d29819ebf4d8122913569142f" exitCode=0 Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.227692 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-33d4-account-create-update-ng7q9" event={"ID":"b7d41549-25ee-4695-a0fb-0db75a1d2238","Type":"ContainerDied","Data":"2db919405f6a2933e172ecad1f1d73d7ffb6d65d29819ebf4d8122913569142f"} Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.237494 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rsm9w" event={"ID":"95469730-4c0e-45f8-b654-26449f12da7d","Type":"ContainerStarted","Data":"7b3a6e868b5472233f1f6a54a7afaebe77ab0191dae41c207b1298b4a0e54500"} Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.237539 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rsm9w" event={"ID":"95469730-4c0e-45f8-b654-26449f12da7d","Type":"ContainerStarted","Data":"f68009f04f69f69e84439f7486c7feae249692eed73967a25bd4b53c86356b6d"} Dec 01 07:12:11 crc kubenswrapper[4822]: W1201 07:12:11.240787 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf6ad07c_9fbe_4b68_ade4_1ab1b755e3ad.slice/crio-d445e763f10464318061be1fb1d7f7151a95df6c4f78f2cb0bf7ea1306d536ae WatchSource:0}: Error finding container d445e763f10464318061be1fb1d7f7151a95df6c4f78f2cb0bf7ea1306d536ae: Status 404 returned error can't find the container with id d445e763f10464318061be1fb1d7f7151a95df6c4f78f2cb0bf7ea1306d536ae Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.246341 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4972-account-create-update-962bd" event={"ID":"f14733ce-3da3-40ad-97d6-b9ce628590ec","Type":"ContainerStarted","Data":"628d09c60f5340984c3a0cf55815e7eba46360cfa8b9ded6abf84c8536c847d3"} Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.246397 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4972-account-create-update-962bd" event={"ID":"f14733ce-3da3-40ad-97d6-b9ce628590ec","Type":"ContainerStarted","Data":"f0c0c7636ed6e24c730e90d5ef45c83878b1f1a89651d738da9ac5a941179984"} Dec 01 07:12:11 crc 
kubenswrapper[4822]: I1201 07:12:11.249308 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.256731 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-2v2wt" podStartSLOduration=2.256698928 podStartE2EDuration="2.256698928s" podCreationTimestamp="2025-12-01 07:12:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:12:11.232391865 +0000 UTC m=+1286.553199551" watchObservedRunningTime="2025-12-01 07:12:11.256698928 +0000 UTC m=+1286.577506614" Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.261543 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-g5k9q" event={"ID":"141fe1bc-0085-48cc-a283-f8596ff7240a","Type":"ContainerStarted","Data":"4ab0177f95b86e1dd9ace38b493c47efe247336706a36e6bd5c5cfd1864e64b5"} Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.261633 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-g5k9q" event={"ID":"141fe1bc-0085-48cc-a283-f8596ff7240a","Type":"ContainerStarted","Data":"721ce5ad16d75c1f40ae110721db53904e21d29e0920f36c01f50e5182c73218"} Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.268950 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1050-account-create-update-2khv4" event={"ID":"f13d0961-136f-4ad6-aa25-23c7e439847a","Type":"ContainerStarted","Data":"23ece03fa6aebdc1cf2aed8e717b65e8f2fd1b8e42bb8f6df11deb02c5eb932b"} Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.269005 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1050-account-create-update-2khv4" event={"ID":"f13d0961-136f-4ad6-aa25-23c7e439847a","Type":"ContainerStarted","Data":"0b098b76d10e8f72a161c1fa43cd0a0783aaaf818e520e4b7c5a9113099859f3"} Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.317296 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-4972-account-create-update-962bd" podStartSLOduration=2.317259351 podStartE2EDuration="2.317259351s" podCreationTimestamp="2025-12-01 07:12:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:12:11.269853758 +0000 UTC m=+1286.590661454" watchObservedRunningTime="2025-12-01 07:12:11.317259351 +0000 UTC m=+1286.638067037" Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.340039 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-rsm9w" podStartSLOduration=2.340006681 podStartE2EDuration="2.340006681s" podCreationTimestamp="2025-12-01 07:12:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:12:11.285992572 +0000 UTC m=+1286.606800258" watchObservedRunningTime="2025-12-01 07:12:11.340006681 +0000 UTC m=+1286.660814367" Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.375989 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-1050-account-create-update-2khv4" podStartSLOduration=2.375961262 podStartE2EDuration="2.375961262s" podCreationTimestamp="2025-12-01 07:12:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-01 07:12:11.311925661 +0000 UTC m=+1286.632733347" watchObservedRunningTime="2025-12-01 07:12:11.375961262 +0000 UTC m=+1286.696768948" Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.399967 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-g5k9q" podStartSLOduration=3.399935646 podStartE2EDuration="3.399935646s" podCreationTimestamp="2025-12-01 07:12:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:12:11.333164069 +0000 UTC m=+1286.653971755" watchObservedRunningTime="2025-12-01 07:12:11.399935646 +0000 UTC m=+1286.720743332" Dec 01 07:12:11 crc kubenswrapper[4822]: I1201 07:12:11.776665 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.280299 4822 generic.go:334] "Generic (PLEG): container finished" podID="f14733ce-3da3-40ad-97d6-b9ce628590ec" containerID="628d09c60f5340984c3a0cf55815e7eba46360cfa8b9ded6abf84c8536c847d3" exitCode=0 Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.280401 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4972-account-create-update-962bd" event={"ID":"f14733ce-3da3-40ad-97d6-b9ce628590ec","Type":"ContainerDied","Data":"628d09c60f5340984c3a0cf55815e7eba46360cfa8b9ded6abf84c8536c847d3"} Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.283626 4822 generic.go:334] "Generic (PLEG): container finished" podID="141fe1bc-0085-48cc-a283-f8596ff7240a" containerID="4ab0177f95b86e1dd9ace38b493c47efe247336706a36e6bd5c5cfd1864e64b5" exitCode=0 Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.283689 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-g5k9q" event={"ID":"141fe1bc-0085-48cc-a283-f8596ff7240a","Type":"ContainerDied","Data":"4ab0177f95b86e1dd9ace38b493c47efe247336706a36e6bd5c5cfd1864e64b5"} Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.285862 4822 generic.go:334] "Generic (PLEG): container finished" podID="f13d0961-136f-4ad6-aa25-23c7e439847a" containerID="23ece03fa6aebdc1cf2aed8e717b65e8f2fd1b8e42bb8f6df11deb02c5eb932b" exitCode=0 Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.285904 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1050-account-create-update-2khv4" event={"ID":"f13d0961-136f-4ad6-aa25-23c7e439847a","Type":"ContainerDied","Data":"23ece03fa6aebdc1cf2aed8e717b65e8f2fd1b8e42bb8f6df11deb02c5eb932b"} Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.287657 4822 generic.go:334] "Generic (PLEG): container finished" podID="f678b4fe-2c75-49a6-a191-7e31d28eea02" containerID="dacd4a868e2652b5235b0281e3dd912a7ec2077150746b150606b21da568746e" exitCode=0 Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.287753 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-2v2wt" event={"ID":"f678b4fe-2c75-49a6-a191-7e31d28eea02","Type":"ContainerDied","Data":"dacd4a868e2652b5235b0281e3dd912a7ec2077150746b150606b21da568746e"} Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.290213 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad","Type":"ContainerStarted","Data":"9d8faff32bf994bf2c47b57483f4c6c52965e82a69a3187835b490f57b35a682"} Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.290241 4822 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad","Type":"ContainerStarted","Data":"d445e763f10464318061be1fb1d7f7151a95df6c4f78f2cb0bf7ea1306d536ae"} Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.291895 4822 generic.go:334] "Generic (PLEG): container finished" podID="95469730-4c0e-45f8-b654-26449f12da7d" containerID="7b3a6e868b5472233f1f6a54a7afaebe77ab0191dae41c207b1298b4a0e54500" exitCode=0 Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.292099 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rsm9w" event={"ID":"95469730-4c0e-45f8-b654-26449f12da7d","Type":"ContainerDied","Data":"7b3a6e868b5472233f1f6a54a7afaebe77ab0191dae41c207b1298b4a0e54500"} Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.542935 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.542993 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.737068 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-33d4-account-create-update-ng7q9" Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.916589 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7d41549-25ee-4695-a0fb-0db75a1d2238-operator-scripts\") pod \"b7d41549-25ee-4695-a0fb-0db75a1d2238\" (UID: \"b7d41549-25ee-4695-a0fb-0db75a1d2238\") " Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.916754 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zst2q\" (UniqueName: \"kubernetes.io/projected/b7d41549-25ee-4695-a0fb-0db75a1d2238-kube-api-access-zst2q\") pod \"b7d41549-25ee-4695-a0fb-0db75a1d2238\" (UID: \"b7d41549-25ee-4695-a0fb-0db75a1d2238\") " Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.917478 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7d41549-25ee-4695-a0fb-0db75a1d2238-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b7d41549-25ee-4695-a0fb-0db75a1d2238" (UID: "b7d41549-25ee-4695-a0fb-0db75a1d2238"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:12:12 crc kubenswrapper[4822]: I1201 07:12:12.932313 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7d41549-25ee-4695-a0fb-0db75a1d2238-kube-api-access-zst2q" (OuterVolumeSpecName: "kube-api-access-zst2q") pod "b7d41549-25ee-4695-a0fb-0db75a1d2238" (UID: "b7d41549-25ee-4695-a0fb-0db75a1d2238"). InnerVolumeSpecName "kube-api-access-zst2q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:13 crc kubenswrapper[4822]: I1201 07:12:13.019098 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7d41549-25ee-4695-a0fb-0db75a1d2238-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:13 crc kubenswrapper[4822]: I1201 07:12:13.019133 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zst2q\" (UniqueName: \"kubernetes.io/projected/b7d41549-25ee-4695-a0fb-0db75a1d2238-kube-api-access-zst2q\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:13 crc kubenswrapper[4822]: I1201 07:12:13.304351 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-33d4-account-create-update-ng7q9" event={"ID":"b7d41549-25ee-4695-a0fb-0db75a1d2238","Type":"ContainerDied","Data":"a1f867b6254338506a1eee634cf2617607c4d0a6f13df17250b0ee3c46a90bf5"} Dec 01 07:12:13 crc kubenswrapper[4822]: I1201 07:12:13.304397 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1f867b6254338506a1eee634cf2617607c4d0a6f13df17250b0ee3c46a90bf5" Dec 01 07:12:13 crc kubenswrapper[4822]: I1201 07:12:13.304470 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-33d4-account-create-update-ng7q9" Dec 01 07:12:13 crc kubenswrapper[4822]: I1201 07:12:13.309073 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad","Type":"ContainerStarted","Data":"1307621a19c792e25871c90ba67497ae97b641c73af76fcf3d9d883af39283df"} Dec 01 07:12:13 crc kubenswrapper[4822]: I1201 07:12:13.878892 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rsm9w" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.040142 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95469730-4c0e-45f8-b654-26449f12da7d-operator-scripts\") pod \"95469730-4c0e-45f8-b654-26449f12da7d\" (UID: \"95469730-4c0e-45f8-b654-26449f12da7d\") " Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.040245 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6rwl\" (UniqueName: \"kubernetes.io/projected/95469730-4c0e-45f8-b654-26449f12da7d-kube-api-access-s6rwl\") pod \"95469730-4c0e-45f8-b654-26449f12da7d\" (UID: \"95469730-4c0e-45f8-b654-26449f12da7d\") " Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.042202 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95469730-4c0e-45f8-b654-26449f12da7d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "95469730-4c0e-45f8-b654-26449f12da7d" (UID: "95469730-4c0e-45f8-b654-26449f12da7d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.051874 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95469730-4c0e-45f8-b654-26449f12da7d-kube-api-access-s6rwl" (OuterVolumeSpecName: "kube-api-access-s6rwl") pod "95469730-4c0e-45f8-b654-26449f12da7d" (UID: "95469730-4c0e-45f8-b654-26449f12da7d"). InnerVolumeSpecName "kube-api-access-s6rwl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.142694 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95469730-4c0e-45f8-b654-26449f12da7d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.142744 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6rwl\" (UniqueName: \"kubernetes.io/projected/95469730-4c0e-45f8-b654-26449f12da7d-kube-api-access-s6rwl\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.341924 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad","Type":"ContainerStarted","Data":"b3a17d034d0e1ec0604272df30590fe25466abd87114242078b5ac2912336f9e"} Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.344099 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rsm9w" event={"ID":"95469730-4c0e-45f8-b654-26449f12da7d","Type":"ContainerDied","Data":"f68009f04f69f69e84439f7486c7feae249692eed73967a25bd4b53c86356b6d"} Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.344137 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f68009f04f69f69e84439f7486c7feae249692eed73967a25bd4b53c86356b6d" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.344167 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rsm9w" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.770818 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-2v2wt" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.776151 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1050-account-create-update-2khv4" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.781935 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-4972-account-create-update-962bd" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.804992 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-g5k9q" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.956205 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stjbc\" (UniqueName: \"kubernetes.io/projected/f14733ce-3da3-40ad-97d6-b9ce628590ec-kube-api-access-stjbc\") pod \"f14733ce-3da3-40ad-97d6-b9ce628590ec\" (UID: \"f14733ce-3da3-40ad-97d6-b9ce628590ec\") " Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.956334 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzdt5\" (UniqueName: \"kubernetes.io/projected/f13d0961-136f-4ad6-aa25-23c7e439847a-kube-api-access-lzdt5\") pod \"f13d0961-136f-4ad6-aa25-23c7e439847a\" (UID: \"f13d0961-136f-4ad6-aa25-23c7e439847a\") " Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.956356 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5ls6\" (UniqueName: \"kubernetes.io/projected/f678b4fe-2c75-49a6-a191-7e31d28eea02-kube-api-access-t5ls6\") pod \"f678b4fe-2c75-49a6-a191-7e31d28eea02\" (UID: \"f678b4fe-2c75-49a6-a191-7e31d28eea02\") " Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.956493 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f13d0961-136f-4ad6-aa25-23c7e439847a-operator-scripts\") pod \"f13d0961-136f-4ad6-aa25-23c7e439847a\" (UID: \"f13d0961-136f-4ad6-aa25-23c7e439847a\") " Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.956514 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f14733ce-3da3-40ad-97d6-b9ce628590ec-operator-scripts\") pod \"f14733ce-3da3-40ad-97d6-b9ce628590ec\" (UID: \"f14733ce-3da3-40ad-97d6-b9ce628590ec\") " Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.956537 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhmdr\" (UniqueName: \"kubernetes.io/projected/141fe1bc-0085-48cc-a283-f8596ff7240a-kube-api-access-vhmdr\") pod \"141fe1bc-0085-48cc-a283-f8596ff7240a\" (UID: \"141fe1bc-0085-48cc-a283-f8596ff7240a\") " Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.956576 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/141fe1bc-0085-48cc-a283-f8596ff7240a-operator-scripts\") pod \"141fe1bc-0085-48cc-a283-f8596ff7240a\" (UID: \"141fe1bc-0085-48cc-a283-f8596ff7240a\") " Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.956632 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f678b4fe-2c75-49a6-a191-7e31d28eea02-operator-scripts\") pod \"f678b4fe-2c75-49a6-a191-7e31d28eea02\" (UID: \"f678b4fe-2c75-49a6-a191-7e31d28eea02\") " Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.957881 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f14733ce-3da3-40ad-97d6-b9ce628590ec-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f14733ce-3da3-40ad-97d6-b9ce628590ec" (UID: "f14733ce-3da3-40ad-97d6-b9ce628590ec"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.957907 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f678b4fe-2c75-49a6-a191-7e31d28eea02-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f678b4fe-2c75-49a6-a191-7e31d28eea02" (UID: "f678b4fe-2c75-49a6-a191-7e31d28eea02"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.957956 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/141fe1bc-0085-48cc-a283-f8596ff7240a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "141fe1bc-0085-48cc-a283-f8596ff7240a" (UID: "141fe1bc-0085-48cc-a283-f8596ff7240a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.958045 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f13d0961-136f-4ad6-aa25-23c7e439847a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f13d0961-136f-4ad6-aa25-23c7e439847a" (UID: "f13d0961-136f-4ad6-aa25-23c7e439847a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.963632 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f678b4fe-2c75-49a6-a191-7e31d28eea02-kube-api-access-t5ls6" (OuterVolumeSpecName: "kube-api-access-t5ls6") pod "f678b4fe-2c75-49a6-a191-7e31d28eea02" (UID: "f678b4fe-2c75-49a6-a191-7e31d28eea02"). InnerVolumeSpecName "kube-api-access-t5ls6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.963687 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f14733ce-3da3-40ad-97d6-b9ce628590ec-kube-api-access-stjbc" (OuterVolumeSpecName: "kube-api-access-stjbc") pod "f14733ce-3da3-40ad-97d6-b9ce628590ec" (UID: "f14733ce-3da3-40ad-97d6-b9ce628590ec"). InnerVolumeSpecName "kube-api-access-stjbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.964468 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/141fe1bc-0085-48cc-a283-f8596ff7240a-kube-api-access-vhmdr" (OuterVolumeSpecName: "kube-api-access-vhmdr") pod "141fe1bc-0085-48cc-a283-f8596ff7240a" (UID: "141fe1bc-0085-48cc-a283-f8596ff7240a"). InnerVolumeSpecName "kube-api-access-vhmdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:14 crc kubenswrapper[4822]: I1201 07:12:14.971843 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f13d0961-136f-4ad6-aa25-23c7e439847a-kube-api-access-lzdt5" (OuterVolumeSpecName: "kube-api-access-lzdt5") pod "f13d0961-136f-4ad6-aa25-23c7e439847a" (UID: "f13d0961-136f-4ad6-aa25-23c7e439847a"). InnerVolumeSpecName "kube-api-access-lzdt5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.060639 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f13d0961-136f-4ad6-aa25-23c7e439847a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.060689 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f14733ce-3da3-40ad-97d6-b9ce628590ec-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.060710 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhmdr\" (UniqueName: \"kubernetes.io/projected/141fe1bc-0085-48cc-a283-f8596ff7240a-kube-api-access-vhmdr\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.060732 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/141fe1bc-0085-48cc-a283-f8596ff7240a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.060749 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f678b4fe-2c75-49a6-a191-7e31d28eea02-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.060764 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stjbc\" (UniqueName: \"kubernetes.io/projected/f14733ce-3da3-40ad-97d6-b9ce628590ec-kube-api-access-stjbc\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.060779 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzdt5\" (UniqueName: \"kubernetes.io/projected/f13d0961-136f-4ad6-aa25-23c7e439847a-kube-api-access-lzdt5\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.060795 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5ls6\" (UniqueName: \"kubernetes.io/projected/f678b4fe-2c75-49a6-a191-7e31d28eea02-kube-api-access-t5ls6\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.353651 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4972-account-create-update-962bd" event={"ID":"f14733ce-3da3-40ad-97d6-b9ce628590ec","Type":"ContainerDied","Data":"f0c0c7636ed6e24c730e90d5ef45c83878b1f1a89651d738da9ac5a941179984"} Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.353694 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0c0c7636ed6e24c730e90d5ef45c83878b1f1a89651d738da9ac5a941179984" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.353679 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-4972-account-create-update-962bd" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.355043 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-g5k9q" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.355238 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-g5k9q" event={"ID":"141fe1bc-0085-48cc-a283-f8596ff7240a","Type":"ContainerDied","Data":"721ce5ad16d75c1f40ae110721db53904e21d29e0920f36c01f50e5182c73218"} Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.355291 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="721ce5ad16d75c1f40ae110721db53904e21d29e0920f36c01f50e5182c73218" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.358393 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1050-account-create-update-2khv4" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.358513 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1050-account-create-update-2khv4" event={"ID":"f13d0961-136f-4ad6-aa25-23c7e439847a","Type":"ContainerDied","Data":"0b098b76d10e8f72a161c1fa43cd0a0783aaaf818e520e4b7c5a9113099859f3"} Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.358540 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b098b76d10e8f72a161c1fa43cd0a0783aaaf818e520e4b7c5a9113099859f3" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.359573 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-2v2wt" event={"ID":"f678b4fe-2c75-49a6-a191-7e31d28eea02","Type":"ContainerDied","Data":"e5bdef3575d9e6277ff799a5d1b8722aa9c79e96910a3753b73fee7d8443bcd4"} Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.359597 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5bdef3575d9e6277ff799a5d1b8722aa9c79e96910a3753b73fee7d8443bcd4" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.359708 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-2v2wt" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.362278 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad","Type":"ContainerStarted","Data":"541a3b4d3454cc4b59791b20d14807f08e225f86ccc5af8d74f1fada1a910b56"} Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.362425 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="ceilometer-central-agent" containerID="cri-o://9d8faff32bf994bf2c47b57483f4c6c52965e82a69a3187835b490f57b35a682" gracePeriod=30 Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.362626 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.362643 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="proxy-httpd" containerID="cri-o://541a3b4d3454cc4b59791b20d14807f08e225f86ccc5af8d74f1fada1a910b56" gracePeriod=30 Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.362676 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="ceilometer-notification-agent" containerID="cri-o://1307621a19c792e25871c90ba67497ae97b641c73af76fcf3d9d883af39283df" gracePeriod=30 Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.362714 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="sg-core" containerID="cri-o://b3a17d034d0e1ec0604272df30590fe25466abd87114242078b5ac2912336f9e" gracePeriod=30 Dec 01 07:12:15 crc kubenswrapper[4822]: I1201 07:12:15.399191 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.5057146609999998 podStartE2EDuration="5.399166398s" podCreationTimestamp="2025-12-01 07:12:10 +0000 UTC" firstStartedPulling="2025-12-01 07:12:11.245500243 +0000 UTC m=+1286.566307929" lastFinishedPulling="2025-12-01 07:12:15.13895198 +0000 UTC m=+1290.459759666" observedRunningTime="2025-12-01 07:12:15.389950958 +0000 UTC m=+1290.710758644" watchObservedRunningTime="2025-12-01 07:12:15.399166398 +0000 UTC m=+1290.719974084" Dec 01 07:12:16 crc kubenswrapper[4822]: I1201 07:12:16.372635 4822 generic.go:334] "Generic (PLEG): container finished" podID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerID="b3a17d034d0e1ec0604272df30590fe25466abd87114242078b5ac2912336f9e" exitCode=2 Dec 01 07:12:16 crc kubenswrapper[4822]: I1201 07:12:16.372674 4822 generic.go:334] "Generic (PLEG): container finished" podID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerID="1307621a19c792e25871c90ba67497ae97b641c73af76fcf3d9d883af39283df" exitCode=0 Dec 01 07:12:16 crc kubenswrapper[4822]: I1201 07:12:16.372696 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad","Type":"ContainerDied","Data":"b3a17d034d0e1ec0604272df30590fe25466abd87114242078b5ac2912336f9e"} Dec 01 07:12:16 crc kubenswrapper[4822]: I1201 07:12:16.372723 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad","Type":"ContainerDied","Data":"1307621a19c792e25871c90ba67497ae97b641c73af76fcf3d9d883af39283df"} Dec 01 07:12:16 crc kubenswrapper[4822]: I1201 07:12:16.961869 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:16 crc kubenswrapper[4822]: I1201 07:12:16.964785 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.622489 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vgj7h"] Dec 01 07:12:19 crc kubenswrapper[4822]: E1201 07:12:19.623598 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="141fe1bc-0085-48cc-a283-f8596ff7240a" containerName="mariadb-database-create" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.623612 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="141fe1bc-0085-48cc-a283-f8596ff7240a" containerName="mariadb-database-create" Dec 01 07:12:19 crc kubenswrapper[4822]: E1201 07:12:19.623629 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f13d0961-136f-4ad6-aa25-23c7e439847a" containerName="mariadb-account-create-update" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.623635 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f13d0961-136f-4ad6-aa25-23c7e439847a" containerName="mariadb-account-create-update" Dec 01 07:12:19 crc kubenswrapper[4822]: E1201 07:12:19.623663 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95469730-4c0e-45f8-b654-26449f12da7d" containerName="mariadb-database-create" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.623669 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="95469730-4c0e-45f8-b654-26449f12da7d" containerName="mariadb-database-create" Dec 01 07:12:19 crc kubenswrapper[4822]: E1201 07:12:19.623684 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f678b4fe-2c75-49a6-a191-7e31d28eea02" containerName="mariadb-database-create" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.623691 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f678b4fe-2c75-49a6-a191-7e31d28eea02" containerName="mariadb-database-create" Dec 01 07:12:19 crc kubenswrapper[4822]: E1201 07:12:19.623700 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7d41549-25ee-4695-a0fb-0db75a1d2238" containerName="mariadb-account-create-update" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.623707 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7d41549-25ee-4695-a0fb-0db75a1d2238" containerName="mariadb-account-create-update" Dec 01 07:12:19 crc kubenswrapper[4822]: E1201 07:12:19.623719 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f14733ce-3da3-40ad-97d6-b9ce628590ec" containerName="mariadb-account-create-update" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.623725 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f14733ce-3da3-40ad-97d6-b9ce628590ec" containerName="mariadb-account-create-update" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.623903 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f13d0961-136f-4ad6-aa25-23c7e439847a" containerName="mariadb-account-create-update" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.623917 4822 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f14733ce-3da3-40ad-97d6-b9ce628590ec" containerName="mariadb-account-create-update" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.623928 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f678b4fe-2c75-49a6-a191-7e31d28eea02" containerName="mariadb-database-create" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.623941 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="95469730-4c0e-45f8-b654-26449f12da7d" containerName="mariadb-database-create" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.623953 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7d41549-25ee-4695-a0fb-0db75a1d2238" containerName="mariadb-account-create-update" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.623967 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="141fe1bc-0085-48cc-a283-f8596ff7240a" containerName="mariadb-database-create" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.624501 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.627113 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.627428 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.627427 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-hl2xx" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.641888 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vgj7h"] Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.748458 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrm6b\" (UniqueName: \"kubernetes.io/projected/69ebc96d-4310-46ec-a237-9f299e08dc64-kube-api-access-lrm6b\") pod \"nova-cell0-conductor-db-sync-vgj7h\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.748525 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vgj7h\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.748612 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-config-data\") pod \"nova-cell0-conductor-db-sync-vgj7h\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.748694 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-scripts\") pod \"nova-cell0-conductor-db-sync-vgj7h\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 
07:12:19.850638 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrm6b\" (UniqueName: \"kubernetes.io/projected/69ebc96d-4310-46ec-a237-9f299e08dc64-kube-api-access-lrm6b\") pod \"nova-cell0-conductor-db-sync-vgj7h\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.850698 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vgj7h\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.850754 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-config-data\") pod \"nova-cell0-conductor-db-sync-vgj7h\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.850789 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-scripts\") pod \"nova-cell0-conductor-db-sync-vgj7h\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.859083 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-config-data\") pod \"nova-cell0-conductor-db-sync-vgj7h\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.861142 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-scripts\") pod \"nova-cell0-conductor-db-sync-vgj7h\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.864500 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vgj7h\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.881613 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrm6b\" (UniqueName: \"kubernetes.io/projected/69ebc96d-4310-46ec-a237-9f299e08dc64-kube-api-access-lrm6b\") pod \"nova-cell0-conductor-db-sync-vgj7h\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:19 crc kubenswrapper[4822]: I1201 07:12:19.954176 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:20 crc kubenswrapper[4822]: I1201 07:12:20.607612 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vgj7h"] Dec 01 07:12:20 crc kubenswrapper[4822]: W1201 07:12:20.617118 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69ebc96d_4310_46ec_a237_9f299e08dc64.slice/crio-e3f1a32a6a77baf5b5ea38bafcfffaf608ba2c7cd4cff5e2f3d1c177d53e5d2b WatchSource:0}: Error finding container e3f1a32a6a77baf5b5ea38bafcfffaf608ba2c7cd4cff5e2f3d1c177d53e5d2b: Status 404 returned error can't find the container with id e3f1a32a6a77baf5b5ea38bafcfffaf608ba2c7cd4cff5e2f3d1c177d53e5d2b Dec 01 07:12:21 crc kubenswrapper[4822]: I1201 07:12:21.418886 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vgj7h" event={"ID":"69ebc96d-4310-46ec-a237-9f299e08dc64","Type":"ContainerStarted","Data":"e3f1a32a6a77baf5b5ea38bafcfffaf608ba2c7cd4cff5e2f3d1c177d53e5d2b"} Dec 01 07:12:22 crc kubenswrapper[4822]: I1201 07:12:22.446489 4822 generic.go:334] "Generic (PLEG): container finished" podID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerID="9d8faff32bf994bf2c47b57483f4c6c52965e82a69a3187835b490f57b35a682" exitCode=0 Dec 01 07:12:22 crc kubenswrapper[4822]: I1201 07:12:22.446834 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad","Type":"ContainerDied","Data":"9d8faff32bf994bf2c47b57483f4c6c52965e82a69a3187835b490f57b35a682"} Dec 01 07:12:23 crc kubenswrapper[4822]: I1201 07:12:23.676339 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:12:23 crc kubenswrapper[4822]: I1201 07:12:23.677202 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" containerName="glance-httpd" containerID="cri-o://6b21ee5ce8f0abec5b0d22757a6f4c5fbcb856a119ec2067aaacc6a204aa2e1f" gracePeriod=30 Dec 01 07:12:23 crc kubenswrapper[4822]: I1201 07:12:23.676870 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" containerName="glance-log" containerID="cri-o://7cd21da61099911a74c5da833d9f3efde198fb8f4dd79ac58463851e358cb535" gracePeriod=30 Dec 01 07:12:24 crc kubenswrapper[4822]: I1201 07:12:24.472465 4822 generic.go:334] "Generic (PLEG): container finished" podID="fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" containerID="7cd21da61099911a74c5da833d9f3efde198fb8f4dd79ac58463851e358cb535" exitCode=143 Dec 01 07:12:24 crc kubenswrapper[4822]: I1201 07:12:24.472530 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2","Type":"ContainerDied","Data":"7cd21da61099911a74c5da833d9f3efde198fb8f4dd79ac58463851e358cb535"} Dec 01 07:12:27 crc kubenswrapper[4822]: I1201 07:12:27.529228 4822 generic.go:334] "Generic (PLEG): container finished" podID="fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" containerID="6b21ee5ce8f0abec5b0d22757a6f4c5fbcb856a119ec2067aaacc6a204aa2e1f" exitCode=0 Dec 01 07:12:27 crc kubenswrapper[4822]: I1201 07:12:27.529502 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2","Type":"ContainerDied","Data":"6b21ee5ce8f0abec5b0d22757a6f4c5fbcb856a119ec2067aaacc6a204aa2e1f"} Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.166199 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.263222 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-combined-ca-bundle\") pod \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.263639 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-httpd-run\") pod \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.263713 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-logs\") pod \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.263742 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8z6l\" (UniqueName: \"kubernetes.io/projected/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-kube-api-access-w8z6l\") pod \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.263809 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-internal-tls-certs\") pod \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.263838 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-config-data\") pod \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.263873 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.263902 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-scripts\") pod \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\" (UID: \"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2\") " Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.266738 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" (UID: "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.266928 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-logs" (OuterVolumeSpecName: "logs") pod "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" (UID: "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.274010 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-scripts" (OuterVolumeSpecName: "scripts") pod "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" (UID: "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.274740 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-kube-api-access-w8z6l" (OuterVolumeSpecName: "kube-api-access-w8z6l") pod "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" (UID: "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2"). InnerVolumeSpecName "kube-api-access-w8z6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.279415 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" (UID: "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.313567 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" (UID: "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.342937 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-config-data" (OuterVolumeSpecName: "config-data") pod "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" (UID: "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.350923 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" (UID: "fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.366077 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.366288 4822 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.366381 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.366456 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8z6l\" (UniqueName: \"kubernetes.io/projected/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-kube-api-access-w8z6l\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.366521 4822 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.366603 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.366726 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.366816 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.391918 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.468772 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.542018 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.542011 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2","Type":"ContainerDied","Data":"6416935c4a73cd31879fe7faee99a7a861fedc4d5f95d5624eee541d08176870"} Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.542186 4822 scope.go:117] "RemoveContainer" containerID="6b21ee5ce8f0abec5b0d22757a6f4c5fbcb856a119ec2067aaacc6a204aa2e1f" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.543424 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vgj7h" event={"ID":"69ebc96d-4310-46ec-a237-9f299e08dc64","Type":"ContainerStarted","Data":"9f565c712bfac69eb668bb277f3e84214426f99043d5e8407c46ddd71a4c7191"} Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.560811 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-vgj7h" podStartSLOduration=2.308621309 podStartE2EDuration="9.560793624s" podCreationTimestamp="2025-12-01 07:12:19 +0000 UTC" firstStartedPulling="2025-12-01 07:12:20.620038174 +0000 UTC m=+1295.940845860" lastFinishedPulling="2025-12-01 07:12:27.872210489 +0000 UTC m=+1303.193018175" observedRunningTime="2025-12-01 07:12:28.55745709 +0000 UTC m=+1303.878264786" watchObservedRunningTime="2025-12-01 07:12:28.560793624 +0000 UTC m=+1303.881601300" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.582066 4822 scope.go:117] "RemoveContainer" containerID="7cd21da61099911a74c5da833d9f3efde198fb8f4dd79ac58463851e358cb535" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.584702 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.604055 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.615940 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:12:28 crc kubenswrapper[4822]: E1201 07:12:28.616475 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" containerName="glance-log" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.616496 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" containerName="glance-log" Dec 01 07:12:28 crc kubenswrapper[4822]: E1201 07:12:28.616562 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" containerName="glance-httpd" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.616575 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" containerName="glance-httpd" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.616861 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" containerName="glance-httpd" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.616904 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" containerName="glance-log" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.618169 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.623488 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.623776 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.625653 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.778222 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4508389c-d1b8-4646-902e-4fbb597de2b7-logs\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.778472 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4508389c-d1b8-4646-902e-4fbb597de2b7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.778585 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.778659 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.778754 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.778885 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.779050 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.779179 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-lx92k\" (UniqueName: \"kubernetes.io/projected/4508389c-d1b8-4646-902e-4fbb597de2b7-kube-api-access-lx92k\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.880511 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4508389c-d1b8-4646-902e-4fbb597de2b7-logs\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.880844 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4508389c-d1b8-4646-902e-4fbb597de2b7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.880979 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.881061 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.880921 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4508389c-d1b8-4646-902e-4fbb597de2b7-logs\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.881186 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.881144 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.881309 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.881365 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/4508389c-d1b8-4646-902e-4fbb597de2b7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.881490 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.881577 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx92k\" (UniqueName: \"kubernetes.io/projected/4508389c-d1b8-4646-902e-4fbb597de2b7-kube-api-access-lx92k\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.885083 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.885696 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.887108 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.897449 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.911956 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.913770 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx92k\" (UniqueName: \"kubernetes.io/projected/4508389c-d1b8-4646-902e-4fbb597de2b7-kube-api-access-lx92k\") pod \"glance-default-internal-api-0\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.935023 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:28 crc kubenswrapper[4822]: I1201 07:12:28.964825 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2" path="/var/lib/kubelet/pods/fb9eaa59-4e85-4dd0-b5e3-9e42001f05f2/volumes" Dec 01 07:12:29 crc kubenswrapper[4822]: I1201 07:12:29.547626 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:12:30 crc kubenswrapper[4822]: I1201 07:12:30.584216 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4508389c-d1b8-4646-902e-4fbb597de2b7","Type":"ContainerStarted","Data":"0a586d29377fca2fb0c88e3caf95850b3d5091a08ee0961b98a076104df131df"} Dec 01 07:12:30 crc kubenswrapper[4822]: I1201 07:12:30.584463 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4508389c-d1b8-4646-902e-4fbb597de2b7","Type":"ContainerStarted","Data":"a015b79c865e58f510b41e6d96d06aea686c1ce7cd540abaa49dfbc4e0988fda"} Dec 01 07:12:31 crc kubenswrapper[4822]: I1201 07:12:31.596116 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4508389c-d1b8-4646-902e-4fbb597de2b7","Type":"ContainerStarted","Data":"b3c842a2cb463bfede65789670bfcdeca18f9089a409fd8da36121671752306c"} Dec 01 07:12:31 crc kubenswrapper[4822]: I1201 07:12:31.634230 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.634210698 podStartE2EDuration="3.634210698s" podCreationTimestamp="2025-12-01 07:12:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:12:31.616299474 +0000 UTC m=+1306.937107180" watchObservedRunningTime="2025-12-01 07:12:31.634210698 +0000 UTC m=+1306.955018384" Dec 01 07:12:33 crc kubenswrapper[4822]: I1201 07:12:33.134455 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:12:33 crc kubenswrapper[4822]: I1201 07:12:33.134976 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="911beba1-8e38-4051-a88c-71e0a9ac20a4" containerName="glance-log" containerID="cri-o://e016370e4b35515437781c55ea2fbcaf57b7c98d9107e21eb86fc167091c0bed" gracePeriod=30 Dec 01 07:12:33 crc kubenswrapper[4822]: I1201 07:12:33.135078 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="911beba1-8e38-4051-a88c-71e0a9ac20a4" containerName="glance-httpd" containerID="cri-o://c2450bf788f356d13f91c3cf9dd8d0ca0efcdb51deb175e0510e41c6dbedde4e" gracePeriod=30 Dec 01 07:12:33 crc kubenswrapper[4822]: I1201 07:12:33.616361 4822 generic.go:334] "Generic (PLEG): container finished" podID="911beba1-8e38-4051-a88c-71e0a9ac20a4" containerID="e016370e4b35515437781c55ea2fbcaf57b7c98d9107e21eb86fc167091c0bed" exitCode=143 Dec 01 07:12:33 crc kubenswrapper[4822]: I1201 07:12:33.616414 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"911beba1-8e38-4051-a88c-71e0a9ac20a4","Type":"ContainerDied","Data":"e016370e4b35515437781c55ea2fbcaf57b7c98d9107e21eb86fc167091c0bed"} Dec 01 07:12:36 crc kubenswrapper[4822]: I1201 07:12:36.647050 4822 generic.go:334] "Generic 
(PLEG): container finished" podID="911beba1-8e38-4051-a88c-71e0a9ac20a4" containerID="c2450bf788f356d13f91c3cf9dd8d0ca0efcdb51deb175e0510e41c6dbedde4e" exitCode=0 Dec 01 07:12:36 crc kubenswrapper[4822]: I1201 07:12:36.647128 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"911beba1-8e38-4051-a88c-71e0a9ac20a4","Type":"ContainerDied","Data":"c2450bf788f356d13f91c3cf9dd8d0ca0efcdb51deb175e0510e41c6dbedde4e"} Dec 01 07:12:36 crc kubenswrapper[4822]: I1201 07:12:36.968497 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.136192 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vv2wj\" (UniqueName: \"kubernetes.io/projected/911beba1-8e38-4051-a88c-71e0a9ac20a4-kube-api-access-vv2wj\") pod \"911beba1-8e38-4051-a88c-71e0a9ac20a4\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.136291 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-combined-ca-bundle\") pod \"911beba1-8e38-4051-a88c-71e0a9ac20a4\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.136387 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/911beba1-8e38-4051-a88c-71e0a9ac20a4-logs\") pod \"911beba1-8e38-4051-a88c-71e0a9ac20a4\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.136419 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-public-tls-certs\") pod \"911beba1-8e38-4051-a88c-71e0a9ac20a4\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.136494 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/911beba1-8e38-4051-a88c-71e0a9ac20a4-httpd-run\") pod \"911beba1-8e38-4051-a88c-71e0a9ac20a4\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.136624 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-config-data\") pod \"911beba1-8e38-4051-a88c-71e0a9ac20a4\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.136654 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-scripts\") pod \"911beba1-8e38-4051-a88c-71e0a9ac20a4\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.136677 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"911beba1-8e38-4051-a88c-71e0a9ac20a4\" (UID: \"911beba1-8e38-4051-a88c-71e0a9ac20a4\") " Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.137087 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/empty-dir/911beba1-8e38-4051-a88c-71e0a9ac20a4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "911beba1-8e38-4051-a88c-71e0a9ac20a4" (UID: "911beba1-8e38-4051-a88c-71e0a9ac20a4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.137253 4822 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/911beba1-8e38-4051-a88c-71e0a9ac20a4-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.137541 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/911beba1-8e38-4051-a88c-71e0a9ac20a4-logs" (OuterVolumeSpecName: "logs") pod "911beba1-8e38-4051-a88c-71e0a9ac20a4" (UID: "911beba1-8e38-4051-a88c-71e0a9ac20a4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.142959 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/911beba1-8e38-4051-a88c-71e0a9ac20a4-kube-api-access-vv2wj" (OuterVolumeSpecName: "kube-api-access-vv2wj") pod "911beba1-8e38-4051-a88c-71e0a9ac20a4" (UID: "911beba1-8e38-4051-a88c-71e0a9ac20a4"). InnerVolumeSpecName "kube-api-access-vv2wj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.143312 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-scripts" (OuterVolumeSpecName: "scripts") pod "911beba1-8e38-4051-a88c-71e0a9ac20a4" (UID: "911beba1-8e38-4051-a88c-71e0a9ac20a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.145395 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "911beba1-8e38-4051-a88c-71e0a9ac20a4" (UID: "911beba1-8e38-4051-a88c-71e0a9ac20a4"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.184206 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "911beba1-8e38-4051-a88c-71e0a9ac20a4" (UID: "911beba1-8e38-4051-a88c-71e0a9ac20a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.221637 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-config-data" (OuterVolumeSpecName: "config-data") pod "911beba1-8e38-4051-a88c-71e0a9ac20a4" (UID: "911beba1-8e38-4051-a88c-71e0a9ac20a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.222718 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "911beba1-8e38-4051-a88c-71e0a9ac20a4" (UID: "911beba1-8e38-4051-a88c-71e0a9ac20a4"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.240261 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vv2wj\" (UniqueName: \"kubernetes.io/projected/911beba1-8e38-4051-a88c-71e0a9ac20a4-kube-api-access-vv2wj\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.240322 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.240336 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/911beba1-8e38-4051-a88c-71e0a9ac20a4-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.240350 4822 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.240364 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.240380 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/911beba1-8e38-4051-a88c-71e0a9ac20a4-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.240435 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.268324 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.341667 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.664273 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"911beba1-8e38-4051-a88c-71e0a9ac20a4","Type":"ContainerDied","Data":"6a74119847719bd60205b8a506667b4cbed56c5327b2a0fd5518c94927d3ed88"} Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.665718 4822 scope.go:117] "RemoveContainer" containerID="c2450bf788f356d13f91c3cf9dd8d0ca0efcdb51deb175e0510e41c6dbedde4e" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.664394 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.710131 4822 scope.go:117] "RemoveContainer" containerID="e016370e4b35515437781c55ea2fbcaf57b7c98d9107e21eb86fc167091c0bed" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.714058 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.726016 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.750947 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:12:37 crc kubenswrapper[4822]: E1201 07:12:37.752044 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="911beba1-8e38-4051-a88c-71e0a9ac20a4" containerName="glance-log" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.752154 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="911beba1-8e38-4051-a88c-71e0a9ac20a4" containerName="glance-log" Dec 01 07:12:37 crc kubenswrapper[4822]: E1201 07:12:37.752250 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="911beba1-8e38-4051-a88c-71e0a9ac20a4" containerName="glance-httpd" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.752327 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="911beba1-8e38-4051-a88c-71e0a9ac20a4" containerName="glance-httpd" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.752650 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="911beba1-8e38-4051-a88c-71e0a9ac20a4" containerName="glance-httpd" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.752743 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="911beba1-8e38-4051-a88c-71e0a9ac20a4" containerName="glance-log" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.754199 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.756861 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.756860 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.779981 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.851697 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llw5p\" (UniqueName: \"kubernetes.io/projected/74508f35-c5cd-4e07-8883-831d2de65f35-kube-api-access-llw5p\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.852062 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.852224 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74508f35-c5cd-4e07-8883-831d2de65f35-logs\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.852390 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-config-data\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.852587 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-scripts\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.852703 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/74508f35-c5cd-4e07-8883-831d2de65f35-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.852903 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.853062 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.956707 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-config-data\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.956792 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-scripts\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.956821 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/74508f35-c5cd-4e07-8883-831d2de65f35-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.956915 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.956991 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.957063 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llw5p\" (UniqueName: \"kubernetes.io/projected/74508f35-c5cd-4e07-8883-831d2de65f35-kube-api-access-llw5p\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.957087 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.957121 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74508f35-c5cd-4e07-8883-831d2de65f35-logs\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.957720 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/74508f35-c5cd-4e07-8883-831d2de65f35-logs\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.963628 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.965238 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/74508f35-c5cd-4e07-8883-831d2de65f35-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.965449 4822 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.979648 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.980723 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-config-data\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.981649 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-scripts\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:37 crc kubenswrapper[4822]: I1201 07:12:37.998029 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llw5p\" (UniqueName: \"kubernetes.io/projected/74508f35-c5cd-4e07-8883-831d2de65f35-kube-api-access-llw5p\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:38 crc kubenswrapper[4822]: I1201 07:12:38.010816 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " pod="openstack/glance-default-external-api-0" Dec 01 07:12:38 crc kubenswrapper[4822]: I1201 07:12:38.085376 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:12:38 crc kubenswrapper[4822]: I1201 07:12:38.666876 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:12:38 crc kubenswrapper[4822]: I1201 07:12:38.696219 4822 generic.go:334] "Generic (PLEG): container finished" podID="69ebc96d-4310-46ec-a237-9f299e08dc64" containerID="9f565c712bfac69eb668bb277f3e84214426f99043d5e8407c46ddd71a4c7191" exitCode=0 Dec 01 07:12:38 crc kubenswrapper[4822]: I1201 07:12:38.696285 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vgj7h" event={"ID":"69ebc96d-4310-46ec-a237-9f299e08dc64","Type":"ContainerDied","Data":"9f565c712bfac69eb668bb277f3e84214426f99043d5e8407c46ddd71a4c7191"} Dec 01 07:12:38 crc kubenswrapper[4822]: I1201 07:12:38.936109 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:38 crc kubenswrapper[4822]: I1201 07:12:38.936166 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:38 crc kubenswrapper[4822]: I1201 07:12:38.963300 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="911beba1-8e38-4051-a88c-71e0a9ac20a4" path="/var/lib/kubelet/pods/911beba1-8e38-4051-a88c-71e0a9ac20a4/volumes" Dec 01 07:12:38 crc kubenswrapper[4822]: I1201 07:12:38.972340 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:38 crc kubenswrapper[4822]: I1201 07:12:38.991766 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:39 crc kubenswrapper[4822]: I1201 07:12:39.708980 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"74508f35-c5cd-4e07-8883-831d2de65f35","Type":"ContainerStarted","Data":"8c7dc884cc27a11dfe3c85bef88b9e7c88b3ac50ba6cb9054583b104030b6680"} Dec 01 07:12:39 crc kubenswrapper[4822]: I1201 07:12:39.709378 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:39 crc kubenswrapper[4822]: I1201 07:12:39.709397 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"74508f35-c5cd-4e07-8883-831d2de65f35","Type":"ContainerStarted","Data":"4711cfe676f2c790f085f0e01970d48fc145022b8fd3ff571d022fde881b6a78"} Dec 01 07:12:39 crc kubenswrapper[4822]: I1201 07:12:39.709411 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.118501 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.201002 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrm6b\" (UniqueName: \"kubernetes.io/projected/69ebc96d-4310-46ec-a237-9f299e08dc64-kube-api-access-lrm6b\") pod \"69ebc96d-4310-46ec-a237-9f299e08dc64\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.201078 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-scripts\") pod \"69ebc96d-4310-46ec-a237-9f299e08dc64\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.201212 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-combined-ca-bundle\") pod \"69ebc96d-4310-46ec-a237-9f299e08dc64\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.201298 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-config-data\") pod \"69ebc96d-4310-46ec-a237-9f299e08dc64\" (UID: \"69ebc96d-4310-46ec-a237-9f299e08dc64\") " Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.206087 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-scripts" (OuterVolumeSpecName: "scripts") pod "69ebc96d-4310-46ec-a237-9f299e08dc64" (UID: "69ebc96d-4310-46ec-a237-9f299e08dc64"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.206138 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69ebc96d-4310-46ec-a237-9f299e08dc64-kube-api-access-lrm6b" (OuterVolumeSpecName: "kube-api-access-lrm6b") pod "69ebc96d-4310-46ec-a237-9f299e08dc64" (UID: "69ebc96d-4310-46ec-a237-9f299e08dc64"). InnerVolumeSpecName "kube-api-access-lrm6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.227886 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-config-data" (OuterVolumeSpecName: "config-data") pod "69ebc96d-4310-46ec-a237-9f299e08dc64" (UID: "69ebc96d-4310-46ec-a237-9f299e08dc64"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.247454 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "69ebc96d-4310-46ec-a237-9f299e08dc64" (UID: "69ebc96d-4310-46ec-a237-9f299e08dc64"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.303421 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.303464 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.303480 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrm6b\" (UniqueName: \"kubernetes.io/projected/69ebc96d-4310-46ec-a237-9f299e08dc64-kube-api-access-lrm6b\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.303493 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69ebc96d-4310-46ec-a237-9f299e08dc64-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.624484 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.724039 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"74508f35-c5cd-4e07-8883-831d2de65f35","Type":"ContainerStarted","Data":"70b0818800c2616bc33b123c3a4baa1c0af71f6cf5e88867c893507114ae9b3c"} Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.727290 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vgj7h" event={"ID":"69ebc96d-4310-46ec-a237-9f299e08dc64","Type":"ContainerDied","Data":"e3f1a32a6a77baf5b5ea38bafcfffaf608ba2c7cd4cff5e2f3d1c177d53e5d2b"} Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.727657 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3f1a32a6a77baf5b5ea38bafcfffaf608ba2c7cd4cff5e2f3d1c177d53e5d2b" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.727291 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vgj7h" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.766054 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.766035744 podStartE2EDuration="3.766035744s" podCreationTimestamp="2025-12-01 07:12:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:12:40.745590209 +0000 UTC m=+1316.066397915" watchObservedRunningTime="2025-12-01 07:12:40.766035744 +0000 UTC m=+1316.086843430" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.827450 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 07:12:40 crc kubenswrapper[4822]: E1201 07:12:40.828007 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69ebc96d-4310-46ec-a237-9f299e08dc64" containerName="nova-cell0-conductor-db-sync" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.828032 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="69ebc96d-4310-46ec-a237-9f299e08dc64" containerName="nova-cell0-conductor-db-sync" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.828320 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="69ebc96d-4310-46ec-a237-9f299e08dc64" containerName="nova-cell0-conductor-db-sync" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.829106 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.831295 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.836780 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-hl2xx" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.857723 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.914165 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\") " pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.914228 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\") " pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:40 crc kubenswrapper[4822]: I1201 07:12:40.914584 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5zk8\" (UniqueName: \"kubernetes.io/projected/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-kube-api-access-m5zk8\") pod \"nova-cell0-conductor-0\" (UID: \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\") " pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:41 crc kubenswrapper[4822]: I1201 07:12:41.016861 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\") " pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:41 crc kubenswrapper[4822]: I1201 07:12:41.016930 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\") " pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:41 crc kubenswrapper[4822]: I1201 07:12:41.017019 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5zk8\" (UniqueName: \"kubernetes.io/projected/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-kube-api-access-m5zk8\") pod \"nova-cell0-conductor-0\" (UID: \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\") " pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:41 crc kubenswrapper[4822]: I1201 07:12:41.021867 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\") " pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:41 crc kubenswrapper[4822]: I1201 07:12:41.032409 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\") " pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:41 crc kubenswrapper[4822]: I1201 07:12:41.035182 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5zk8\" (UniqueName: \"kubernetes.io/projected/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-kube-api-access-m5zk8\") pod \"nova-cell0-conductor-0\" (UID: \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\") " pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:41 crc kubenswrapper[4822]: I1201 07:12:41.154777 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:41 crc kubenswrapper[4822]: W1201 07:12:41.660853 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda206e77a_0c4d_49bb_b6d9_c0d18990bd54.slice/crio-696ee146333e2119dcf516074a8b9d51c8302831c15bae32ab23e82b66c6dba0 WatchSource:0}: Error finding container 696ee146333e2119dcf516074a8b9d51c8302831c15bae32ab23e82b66c6dba0: Status 404 returned error can't find the container with id 696ee146333e2119dcf516074a8b9d51c8302831c15bae32ab23e82b66c6dba0 Dec 01 07:12:41 crc kubenswrapper[4822]: I1201 07:12:41.664703 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 07:12:41 crc kubenswrapper[4822]: I1201 07:12:41.738278 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a206e77a-0c4d-49bb-b6d9-c0d18990bd54","Type":"ContainerStarted","Data":"696ee146333e2119dcf516074a8b9d51c8302831c15bae32ab23e82b66c6dba0"} Dec 01 07:12:41 crc kubenswrapper[4822]: I1201 07:12:41.797748 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:41 crc kubenswrapper[4822]: I1201 07:12:41.797863 4822 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 07:12:41 crc kubenswrapper[4822]: I1201 07:12:41.859817 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 07:12:42 crc kubenswrapper[4822]: I1201 07:12:42.542985 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:12:42 crc kubenswrapper[4822]: I1201 07:12:42.543315 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:12:42 crc kubenswrapper[4822]: I1201 07:12:42.753942 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a206e77a-0c4d-49bb-b6d9-c0d18990bd54","Type":"ContainerStarted","Data":"aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c"} Dec 01 07:12:42 crc kubenswrapper[4822]: I1201 07:12:42.754018 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:42 crc kubenswrapper[4822]: I1201 07:12:42.780111 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.780094946 podStartE2EDuration="2.780094946s" podCreationTimestamp="2025-12-01 07:12:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:12:42.779122708 +0000 UTC m=+1318.099930404" watchObservedRunningTime="2025-12-01 07:12:42.780094946 +0000 UTC m=+1318.100902632" Dec 01 07:12:45 crc kubenswrapper[4822]: I1201 07:12:45.828022 4822 generic.go:334] "Generic (PLEG): container finished" podID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" 
containerID="541a3b4d3454cc4b59791b20d14807f08e225f86ccc5af8d74f1fada1a910b56" exitCode=137 Dec 01 07:12:45 crc kubenswrapper[4822]: I1201 07:12:45.828089 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad","Type":"ContainerDied","Data":"541a3b4d3454cc4b59791b20d14807f08e225f86ccc5af8d74f1fada1a910b56"} Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.199786 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.348842 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.457979 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-sg-core-conf-yaml\") pod \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.458178 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlqfl\" (UniqueName: \"kubernetes.io/projected/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-kube-api-access-tlqfl\") pod \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.458273 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-log-httpd\") pod \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.458338 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-run-httpd\") pod \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.458450 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-combined-ca-bundle\") pod \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.458613 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-scripts\") pod \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.458667 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-config-data\") pod \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\" (UID: \"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad\") " Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.459002 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" (UID: "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.459748 4822 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.459723 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" (UID: "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.465789 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-scripts" (OuterVolumeSpecName: "scripts") pod "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" (UID: "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.478194 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-kube-api-access-tlqfl" (OuterVolumeSpecName: "kube-api-access-tlqfl") pod "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" (UID: "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad"). InnerVolumeSpecName "kube-api-access-tlqfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.484506 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" (UID: "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.562019 4822 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.562066 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlqfl\" (UniqueName: \"kubernetes.io/projected/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-kube-api-access-tlqfl\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.562091 4822 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.562109 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.580875 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-config-data" (OuterVolumeSpecName: "config-data") pod "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" (UID: "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.581399 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" (UID: "cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.663980 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.664024 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.689329 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-fqb49"] Dec 01 07:12:46 crc kubenswrapper[4822]: E1201 07:12:46.689779 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="proxy-httpd" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.689799 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="proxy-httpd" Dec 01 07:12:46 crc kubenswrapper[4822]: E1201 07:12:46.689813 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="sg-core" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.689821 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="sg-core" Dec 01 07:12:46 crc kubenswrapper[4822]: E1201 07:12:46.689854 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="ceilometer-central-agent" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.689863 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="ceilometer-central-agent" Dec 01 07:12:46 crc kubenswrapper[4822]: E1201 07:12:46.689884 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="ceilometer-notification-agent" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.689892 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="ceilometer-notification-agent" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.690116 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="ceilometer-notification-agent" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.690140 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="proxy-httpd" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.690156 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="ceilometer-central-agent" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.690169 4822 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" containerName="sg-core" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.690888 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.693177 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.693398 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.699956 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-fqb49"] Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.766028 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6979\" (UniqueName: \"kubernetes.io/projected/7a55bca0-5d9e-4486-901b-829e001c1b16-kube-api-access-l6979\") pod \"nova-cell0-cell-mapping-fqb49\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.766129 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-scripts\") pod \"nova-cell0-cell-mapping-fqb49\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.766183 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fqb49\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.766209 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-config-data\") pod \"nova-cell0-cell-mapping-fqb49\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.843107 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad","Type":"ContainerDied","Data":"d445e763f10464318061be1fb1d7f7151a95df6c4f78f2cb0bf7ea1306d536ae"} Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.843195 4822 scope.go:117] "RemoveContainer" containerID="541a3b4d3454cc4b59791b20d14807f08e225f86ccc5af8d74f1fada1a910b56" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.843355 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.868526 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6979\" (UniqueName: \"kubernetes.io/projected/7a55bca0-5d9e-4486-901b-829e001c1b16-kube-api-access-l6979\") pod \"nova-cell0-cell-mapping-fqb49\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.868640 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-scripts\") pod \"nova-cell0-cell-mapping-fqb49\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.868689 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fqb49\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.868711 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-config-data\") pod \"nova-cell0-cell-mapping-fqb49\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.891355 4822 scope.go:117] "RemoveContainer" containerID="b3a17d034d0e1ec0604272df30590fe25466abd87114242078b5ac2912336f9e" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.891699 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-scripts\") pod \"nova-cell0-cell-mapping-fqb49\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.892290 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fqb49\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.899726 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-config-data\") pod \"nova-cell0-cell-mapping-fqb49\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.904969 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6979\" (UniqueName: \"kubernetes.io/projected/7a55bca0-5d9e-4486-901b-829e001c1b16-kube-api-access-l6979\") pod \"nova-cell0-cell-mapping-fqb49\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.922225 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.924088 4822 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.928443 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.931420 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.942430 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.970445 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad" path="/var/lib/kubelet/pods/cf6ad07c-9fbe-4b68-ade4-1ab1b755e3ad/volumes" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.971502 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.983771 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57rr7\" (UniqueName: \"kubernetes.io/projected/4c7e2928-19c6-4f6b-a697-179b6251219a-kube-api-access-57rr7\") pod \"nova-api-0\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") " pod="openstack/nova-api-0" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.983810 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c7e2928-19c6-4f6b-a697-179b6251219a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") " pod="openstack/nova-api-0" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.983845 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c7e2928-19c6-4f6b-a697-179b6251219a-config-data\") pod \"nova-api-0\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") " pod="openstack/nova-api-0" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.983864 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c7e2928-19c6-4f6b-a697-179b6251219a-logs\") pod \"nova-api-0\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") " pod="openstack/nova-api-0" Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.985143 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:46 crc kubenswrapper[4822]: I1201 07:12:46.987279 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.002741 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.005450 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.020176 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.023466 4822 scope.go:117] "RemoveContainer" containerID="1307621a19c792e25871c90ba67497ae97b641c73af76fcf3d9d883af39283df" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.028658 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.031094 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.033507 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.065716 4822 scope.go:117] "RemoveContainer" containerID="9d8faff32bf994bf2c47b57483f4c6c52965e82a69a3187835b490f57b35a682" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.076530 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085439 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085489 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-run-httpd\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085624 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-config-data\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085653 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-scripts\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085741 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57rr7\" (UniqueName: \"kubernetes.io/projected/4c7e2928-19c6-4f6b-a697-179b6251219a-kube-api-access-57rr7\") pod \"nova-api-0\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") " pod="openstack/nova-api-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085769 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c7e2928-19c6-4f6b-a697-179b6251219a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") " pod="openstack/nova-api-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085792 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22a864cf-abdf-4cb9-9af4-61afede19cbf-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"22a864cf-abdf-4cb9-9af4-61afede19cbf\") " pod="openstack/nova-scheduler-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085820 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085857 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c7e2928-19c6-4f6b-a697-179b6251219a-config-data\") pod \"nova-api-0\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") " pod="openstack/nova-api-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085877 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22a864cf-abdf-4cb9-9af4-61afede19cbf-config-data\") pod \"nova-scheduler-0\" (UID: \"22a864cf-abdf-4cb9-9af4-61afede19cbf\") " pod="openstack/nova-scheduler-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085904 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c7e2928-19c6-4f6b-a697-179b6251219a-logs\") pod \"nova-api-0\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") " pod="openstack/nova-api-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085925 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbphf\" (UniqueName: \"kubernetes.io/projected/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-kube-api-access-pbphf\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085956 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76fkz\" (UniqueName: \"kubernetes.io/projected/22a864cf-abdf-4cb9-9af4-61afede19cbf-kube-api-access-76fkz\") pod \"nova-scheduler-0\" (UID: \"22a864cf-abdf-4cb9-9af4-61afede19cbf\") " pod="openstack/nova-scheduler-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.085975 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-log-httpd\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.088197 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c7e2928-19c6-4f6b-a697-179b6251219a-logs\") pod \"nova-api-0\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") " pod="openstack/nova-api-0" Dec 01 07:12:47 crc kubenswrapper[4822]: 
I1201 07:12:47.111286 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c7e2928-19c6-4f6b-a697-179b6251219a-config-data\") pod \"nova-api-0\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") " pod="openstack/nova-api-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.111805 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c7e2928-19c6-4f6b-a697-179b6251219a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") " pod="openstack/nova-api-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.115692 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.119680 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.119914 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57rr7\" (UniqueName: \"kubernetes.io/projected/4c7e2928-19c6-4f6b-a697-179b6251219a-kube-api-access-57rr7\") pod \"nova-api-0\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") " pod="openstack/nova-api-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.124126 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.214687 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbphf\" (UniqueName: \"kubernetes.io/projected/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-kube-api-access-pbphf\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215029 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76fkz\" (UniqueName: \"kubernetes.io/projected/22a864cf-abdf-4cb9-9af4-61afede19cbf-kube-api-access-76fkz\") pod \"nova-scheduler-0\" (UID: \"22a864cf-abdf-4cb9-9af4-61afede19cbf\") " pod="openstack/nova-scheduler-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215057 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-log-httpd\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215088 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc2522aa-c494-4b9a-a756-cb763cd85d19-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") " pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215178 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215251 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-run-httpd\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215347 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc2522aa-c494-4b9a-a756-cb763cd85d19-logs\") pod \"nova-metadata-0\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") " pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215384 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzsvh\" (UniqueName: \"kubernetes.io/projected/dc2522aa-c494-4b9a-a756-cb763cd85d19-kube-api-access-wzsvh\") pod \"nova-metadata-0\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") " pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215419 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-config-data\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215469 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-scripts\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215562 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22a864cf-abdf-4cb9-9af4-61afede19cbf-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"22a864cf-abdf-4cb9-9af4-61afede19cbf\") " pod="openstack/nova-scheduler-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215588 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215616 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc2522aa-c494-4b9a-a756-cb763cd85d19-config-data\") pod \"nova-metadata-0\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") " pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.215654 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22a864cf-abdf-4cb9-9af4-61afede19cbf-config-data\") pod \"nova-scheduler-0\" (UID: \"22a864cf-abdf-4cb9-9af4-61afede19cbf\") " pod="openstack/nova-scheduler-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.227277 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22a864cf-abdf-4cb9-9af4-61afede19cbf-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"22a864cf-abdf-4cb9-9af4-61afede19cbf\") " pod="openstack/nova-scheduler-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.227709 4822 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-run-httpd\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.227987 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-log-httpd\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.237074 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.237176 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22a864cf-abdf-4cb9-9af4-61afede19cbf-config-data\") pod \"nova-scheduler-0\" (UID: \"22a864cf-abdf-4cb9-9af4-61afede19cbf\") " pod="openstack/nova-scheduler-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.241651 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-config-data\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.242556 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-scripts\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.245066 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76fkz\" (UniqueName: \"kubernetes.io/projected/22a864cf-abdf-4cb9-9af4-61afede19cbf-kube-api-access-76fkz\") pod \"nova-scheduler-0\" (UID: \"22a864cf-abdf-4cb9-9af4-61afede19cbf\") " pod="openstack/nova-scheduler-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.247630 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.252927 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbphf\" (UniqueName: \"kubernetes.io/projected/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-kube-api-access-pbphf\") pod \"ceilometer-0\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.258653 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.296403 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.317638 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/dc2522aa-c494-4b9a-a756-cb763cd85d19-logs\") pod \"nova-metadata-0\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") " pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.317694 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzsvh\" (UniqueName: \"kubernetes.io/projected/dc2522aa-c494-4b9a-a756-cb763cd85d19-kube-api-access-wzsvh\") pod \"nova-metadata-0\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") " pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.317761 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc2522aa-c494-4b9a-a756-cb763cd85d19-config-data\") pod \"nova-metadata-0\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") " pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.317802 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc2522aa-c494-4b9a-a756-cb763cd85d19-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") " pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.319856 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.320692 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc2522aa-c494-4b9a-a756-cb763cd85d19-logs\") pod \"nova-metadata-0\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") " pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.328004 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc2522aa-c494-4b9a-a756-cb763cd85d19-config-data\") pod \"nova-metadata-0\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") " pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.330020 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.330313 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc2522aa-c494-4b9a-a756-cb763cd85d19-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") " pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.342288 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzsvh\" (UniqueName: \"kubernetes.io/projected/dc2522aa-c494-4b9a-a756-cb763cd85d19-kube-api-access-wzsvh\") pod \"nova-metadata-0\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") " pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.349563 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.379815 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.383598 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.387266 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.392696 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5594d9b959-d9p82"] Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.394756 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.407674 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5594d9b959-d9p82"] Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.425782 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.456666 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.520995 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-config\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.521065 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-ovsdbserver-nb\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.521125 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-dns-swift-storage-0\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.521152 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c74c45e-c6e3-4cf8-8202-da93ad73131c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.521187 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-dns-svc\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.521216 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbdk9\" (UniqueName: \"kubernetes.io/projected/bea1d532-5241-4968-84c8-badda78b7e79-kube-api-access-zbdk9\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 
07:12:47.521274 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vt9ww\" (UniqueName: \"kubernetes.io/projected/4c74c45e-c6e3-4cf8-8202-da93ad73131c-kube-api-access-vt9ww\") pod \"nova-cell1-novncproxy-0\" (UID: \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.521309 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-ovsdbserver-sb\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.521355 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c74c45e-c6e3-4cf8-8202-da93ad73131c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.625456 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c74c45e-c6e3-4cf8-8202-da93ad73131c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.625505 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-dns-svc\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.625583 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbdk9\" (UniqueName: \"kubernetes.io/projected/bea1d532-5241-4968-84c8-badda78b7e79-kube-api-access-zbdk9\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.625639 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt9ww\" (UniqueName: \"kubernetes.io/projected/4c74c45e-c6e3-4cf8-8202-da93ad73131c-kube-api-access-vt9ww\") pod \"nova-cell1-novncproxy-0\" (UID: \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.625672 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-ovsdbserver-sb\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.625702 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c74c45e-c6e3-4cf8-8202-da93ad73131c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.625741 
4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-config\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.625769 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-ovsdbserver-nb\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.625802 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-dns-swift-storage-0\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.626670 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-dns-swift-storage-0\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.627183 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-config\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.627812 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-dns-svc\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.628503 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-ovsdbserver-nb\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.630357 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-ovsdbserver-sb\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.635884 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c74c45e-c6e3-4cf8-8202-da93ad73131c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.643493 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4c74c45e-c6e3-4cf8-8202-da93ad73131c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.643652 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbdk9\" (UniqueName: \"kubernetes.io/projected/bea1d532-5241-4968-84c8-badda78b7e79-kube-api-access-zbdk9\") pod \"dnsmasq-dns-5594d9b959-d9p82\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.647460 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vt9ww\" (UniqueName: \"kubernetes.io/projected/4c74c45e-c6e3-4cf8-8202-da93ad73131c-kube-api-access-vt9ww\") pod \"nova-cell1-novncproxy-0\" (UID: \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.718014 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-fqb49"] Dec 01 07:12:47 crc kubenswrapper[4822]: W1201 07:12:47.722602 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a55bca0_5d9e_4486_901b_829e001c1b16.slice/crio-3fee81a9f5e2e978b34c936f3ed04c90d8f82ca07a8141a28fbb36e3d2e46e3c WatchSource:0}: Error finding container 3fee81a9f5e2e978b34c936f3ed04c90d8f82ca07a8141a28fbb36e3d2e46e3c: Status 404 returned error can't find the container with id 3fee81a9f5e2e978b34c936f3ed04c90d8f82ca07a8141a28fbb36e3d2e46e3c Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.730516 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.761289 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.856691 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s65nh"] Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.858993 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.863646 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.863768 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.876843 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s65nh"] Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.902745 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fqb49" event={"ID":"7a55bca0-5d9e-4486-901b-829e001c1b16","Type":"ContainerStarted","Data":"3fee81a9f5e2e978b34c936f3ed04c90d8f82ca07a8141a28fbb36e3d2e46e3c"} Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.963374 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:12:47 crc kubenswrapper[4822]: I1201 07:12:47.978287 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.004510 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:12:48 crc kubenswrapper[4822]: W1201 07:12:48.010229 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22a864cf_abdf_4cb9_9af4_61afede19cbf.slice/crio-ee4256ee2f7284d4f0f4d24f67f354cd5f2b69a6425aab847882f9dd8332531a WatchSource:0}: Error finding container ee4256ee2f7284d4f0f4d24f67f354cd5f2b69a6425aab847882f9dd8332531a: Status 404 returned error can't find the container with id ee4256ee2f7284d4f0f4d24f67f354cd5f2b69a6425aab847882f9dd8332531a Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.034487 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-config-data\") pod \"nova-cell1-conductor-db-sync-s65nh\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.034559 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zldjl\" (UniqueName: \"kubernetes.io/projected/b30c0ac5-fe50-4055-b46c-49928b99d337-kube-api-access-zldjl\") pod \"nova-cell1-conductor-db-sync-s65nh\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.034762 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s65nh\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.034957 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-scripts\") pod \"nova-cell1-conductor-db-sync-s65nh\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " 
pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.086204 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.086258 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.122819 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.137485 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zldjl\" (UniqueName: \"kubernetes.io/projected/b30c0ac5-fe50-4055-b46c-49928b99d337-kube-api-access-zldjl\") pod \"nova-cell1-conductor-db-sync-s65nh\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.137566 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s65nh\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.137646 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-scripts\") pod \"nova-cell1-conductor-db-sync-s65nh\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.137781 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-config-data\") pod \"nova-cell1-conductor-db-sync-s65nh\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.144096 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-config-data\") pod \"nova-cell1-conductor-db-sync-s65nh\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.149153 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-scripts\") pod \"nova-cell1-conductor-db-sync-s65nh\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.152568 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s65nh\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.152833 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 07:12:48 crc kubenswrapper[4822]: 
I1201 07:12:48.163857 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.164157 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zldjl\" (UniqueName: \"kubernetes.io/projected/b30c0ac5-fe50-4055-b46c-49928b99d337-kube-api-access-zldjl\") pod \"nova-cell1-conductor-db-sync-s65nh\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.231958 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5594d9b959-d9p82"] Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.255856 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.310566 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.709790 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s65nh"] Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.922540 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fqb49" event={"ID":"7a55bca0-5d9e-4486-901b-829e001c1b16","Type":"ContainerStarted","Data":"2ed851f5731fcc9e9358b19e778659d1ad17875e1364d03170e682f75a7df687"} Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.927036 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s65nh" event={"ID":"b30c0ac5-fe50-4055-b46c-49928b99d337","Type":"ContainerStarted","Data":"b4e6c5b121170cc82aa94a4280a2d23bd645522b7b584b96e26d084bdfc734dd"} Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.930967 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8bce372-5a61-4aae-84a8-eac76a3e3cc8","Type":"ContainerStarted","Data":"edd2e52b36aa0e0d0f177b5e2e88609567e2351967e657b9baa1b87e137ef445"} Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.932410 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4c7e2928-19c6-4f6b-a697-179b6251219a","Type":"ContainerStarted","Data":"9fe9c6f11a20b590e998c04132f83202ce0ff1455bf66f25449443d223315bd8"} Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.935338 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"4c74c45e-c6e3-4cf8-8202-da93ad73131c","Type":"ContainerStarted","Data":"2d43323a986613f89b1f4639556c1762e0ac8a2f509bfac4feadce76cfd1b2c0"} Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.951986 4822 generic.go:334] "Generic (PLEG): container finished" podID="bea1d532-5241-4968-84c8-badda78b7e79" containerID="16df184a4326cf5ba031b16cfb28020c3343765b30ee9bb4b3f4e9bb2c017914" exitCode=0 Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.952057 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" event={"ID":"bea1d532-5241-4968-84c8-badda78b7e79","Type":"ContainerDied","Data":"16df184a4326cf5ba031b16cfb28020c3343765b30ee9bb4b3f4e9bb2c017914"} Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.952081 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" 
event={"ID":"bea1d532-5241-4968-84c8-badda78b7e79","Type":"ContainerStarted","Data":"22e69159f758a608c1626ad031b5f8b89a1ce4ecaa6e23656e79401d9bed4759"} Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.981528 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.981906 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.981975 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"22a864cf-abdf-4cb9-9af4-61afede19cbf","Type":"ContainerStarted","Data":"ee4256ee2f7284d4f0f4d24f67f354cd5f2b69a6425aab847882f9dd8332531a"} Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.982049 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dc2522aa-c494-4b9a-a756-cb763cd85d19","Type":"ContainerStarted","Data":"f62cd0b2929ded22ac155741f36420350d80e74033217210813018cd17d3c12b"} Dec 01 07:12:48 crc kubenswrapper[4822]: I1201 07:12:48.982436 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-fqb49" podStartSLOduration=2.982415794 podStartE2EDuration="2.982415794s" podCreationTimestamp="2025-12-01 07:12:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:12:48.947869413 +0000 UTC m=+1324.268677099" watchObservedRunningTime="2025-12-01 07:12:48.982415794 +0000 UTC m=+1324.303223480" Dec 01 07:12:49 crc kubenswrapper[4822]: I1201 07:12:49.982899 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s65nh" event={"ID":"b30c0ac5-fe50-4055-b46c-49928b99d337","Type":"ContainerStarted","Data":"6dbc8d38bd7d89b31489d0532e8c45abe57eb433a927646e26ebb4f7bc31c405"} Dec 01 07:12:49 crc kubenswrapper[4822]: I1201 07:12:49.986803 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8bce372-5a61-4aae-84a8-eac76a3e3cc8","Type":"ContainerStarted","Data":"379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381"} Dec 01 07:12:49 crc kubenswrapper[4822]: I1201 07:12:49.989904 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" event={"ID":"bea1d532-5241-4968-84c8-badda78b7e79","Type":"ContainerStarted","Data":"18522af8c645a747d69c9facd364e98dd7884610f65cb4041ab9af854278bd63"} Dec 01 07:12:49 crc kubenswrapper[4822]: I1201 07:12:49.990166 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:50 crc kubenswrapper[4822]: I1201 07:12:49.999401 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-s65nh" podStartSLOduration=2.999384685 podStartE2EDuration="2.999384685s" podCreationTimestamp="2025-12-01 07:12:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:12:49.9992295 +0000 UTC m=+1325.320037186" watchObservedRunningTime="2025-12-01 07:12:49.999384685 +0000 UTC m=+1325.320192371" Dec 01 07:12:50 crc kubenswrapper[4822]: I1201 07:12:50.056160 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" 
podStartSLOduration=3.056140661 podStartE2EDuration="3.056140661s" podCreationTimestamp="2025-12-01 07:12:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:12:50.049489994 +0000 UTC m=+1325.370297680" watchObservedRunningTime="2025-12-01 07:12:50.056140661 +0000 UTC m=+1325.376948347" Dec 01 07:12:51 crc kubenswrapper[4822]: I1201 07:12:51.002886 4822 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 07:12:51 crc kubenswrapper[4822]: I1201 07:12:51.003283 4822 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 07:12:51 crc kubenswrapper[4822]: I1201 07:12:51.106674 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 07:12:51 crc kubenswrapper[4822]: I1201 07:12:51.175816 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 07:12:51 crc kubenswrapper[4822]: I1201 07:12:51.320956 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:12:51 crc kubenswrapper[4822]: I1201 07:12:51.348654 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.038349 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"4c74c45e-c6e3-4cf8-8202-da93ad73131c","Type":"ContainerStarted","Data":"84140cd9805af6c96d907476731a1fa2f264694b115aeb7393dfaadaf128d26b"} Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.038391 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="4c74c45e-c6e3-4cf8-8202-da93ad73131c" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://84140cd9805af6c96d907476731a1fa2f264694b115aeb7393dfaadaf128d26b" gracePeriod=30 Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.040500 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"22a864cf-abdf-4cb9-9af4-61afede19cbf","Type":"ContainerStarted","Data":"b71c48e5426a382940701c02c58cb6292249ab997e440e24cba779a5e42d82c4"} Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.051744 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dc2522aa-c494-4b9a-a756-cb763cd85d19","Type":"ContainerStarted","Data":"3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e"} Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.051793 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dc2522aa-c494-4b9a-a756-cb763cd85d19","Type":"ContainerStarted","Data":"2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b"} Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.051959 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="dc2522aa-c494-4b9a-a756-cb763cd85d19" containerName="nova-metadata-log" containerID="cri-o://2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b" gracePeriod=30 Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.052148 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="dc2522aa-c494-4b9a-a756-cb763cd85d19" containerName="nova-metadata-metadata" 
containerID="cri-o://3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e" gracePeriod=30 Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.057455 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8bce372-5a61-4aae-84a8-eac76a3e3cc8","Type":"ContainerStarted","Data":"5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db"} Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.065388 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.367913816 podStartE2EDuration="6.06536319s" podCreationTimestamp="2025-12-01 07:12:47 +0000 UTC" firstStartedPulling="2025-12-01 07:12:48.311780995 +0000 UTC m=+1323.632588681" lastFinishedPulling="2025-12-01 07:12:52.009230359 +0000 UTC m=+1327.330038055" observedRunningTime="2025-12-01 07:12:53.050134771 +0000 UTC m=+1328.370942447" watchObservedRunningTime="2025-12-01 07:12:53.06536319 +0000 UTC m=+1328.386170876" Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.068628 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4c7e2928-19c6-4f6b-a697-179b6251219a","Type":"ContainerStarted","Data":"62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1"} Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.068665 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4c7e2928-19c6-4f6b-a697-179b6251219a","Type":"ContainerStarted","Data":"438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95"} Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.073663 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.091528643 podStartE2EDuration="7.073645783s" podCreationTimestamp="2025-12-01 07:12:46 +0000 UTC" firstStartedPulling="2025-12-01 07:12:48.015500692 +0000 UTC m=+1323.336308368" lastFinishedPulling="2025-12-01 07:12:51.997617822 +0000 UTC m=+1327.318425508" observedRunningTime="2025-12-01 07:12:53.066584444 +0000 UTC m=+1328.387392130" watchObservedRunningTime="2025-12-01 07:12:53.073645783 +0000 UTC m=+1328.394453469" Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.095346 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.231807698 podStartE2EDuration="6.095324762s" podCreationTimestamp="2025-12-01 07:12:47 +0000 UTC" firstStartedPulling="2025-12-01 07:12:48.145740565 +0000 UTC m=+1323.466548251" lastFinishedPulling="2025-12-01 07:12:52.009257629 +0000 UTC m=+1327.330065315" observedRunningTime="2025-12-01 07:12:53.093132511 +0000 UTC m=+1328.413940197" watchObservedRunningTime="2025-12-01 07:12:53.095324762 +0000 UTC m=+1328.416132448" Dec 01 07:12:53 crc kubenswrapper[4822]: I1201 07:12:53.127152 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.1198768709999998 podStartE2EDuration="7.127127647s" podCreationTimestamp="2025-12-01 07:12:46 +0000 UTC" firstStartedPulling="2025-12-01 07:12:48.000620904 +0000 UTC m=+1323.321428590" lastFinishedPulling="2025-12-01 07:12:52.00787168 +0000 UTC m=+1327.328679366" observedRunningTime="2025-12-01 07:12:53.111909649 +0000 UTC m=+1328.432717335" watchObservedRunningTime="2025-12-01 07:12:53.127127647 +0000 UTC m=+1328.447935333" Dec 01 07:12:54 crc kubenswrapper[4822]: I1201 07:12:54.079526 4822 generic.go:334] "Generic 
(PLEG): container finished" podID="dc2522aa-c494-4b9a-a756-cb763cd85d19" containerID="2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b" exitCode=143 Dec 01 07:12:54 crc kubenswrapper[4822]: I1201 07:12:54.079580 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dc2522aa-c494-4b9a-a756-cb763cd85d19","Type":"ContainerDied","Data":"2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b"} Dec 01 07:12:54 crc kubenswrapper[4822]: I1201 07:12:54.082852 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8bce372-5a61-4aae-84a8-eac76a3e3cc8","Type":"ContainerStarted","Data":"9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555"} Dec 01 07:12:56 crc kubenswrapper[4822]: I1201 07:12:56.106832 4822 generic.go:334] "Generic (PLEG): container finished" podID="7a55bca0-5d9e-4486-901b-829e001c1b16" containerID="2ed851f5731fcc9e9358b19e778659d1ad17875e1364d03170e682f75a7df687" exitCode=0 Dec 01 07:12:56 crc kubenswrapper[4822]: I1201 07:12:56.106928 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fqb49" event={"ID":"7a55bca0-5d9e-4486-901b-829e001c1b16","Type":"ContainerDied","Data":"2ed851f5731fcc9e9358b19e778659d1ad17875e1364d03170e682f75a7df687"} Dec 01 07:12:56 crc kubenswrapper[4822]: I1201 07:12:56.112094 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8bce372-5a61-4aae-84a8-eac76a3e3cc8","Type":"ContainerStarted","Data":"b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6"} Dec 01 07:12:56 crc kubenswrapper[4822]: I1201 07:12:56.112348 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 07:12:56 crc kubenswrapper[4822]: I1201 07:12:56.166550 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.962861015 podStartE2EDuration="10.166524004s" podCreationTimestamp="2025-12-01 07:12:46 +0000 UTC" firstStartedPulling="2025-12-01 07:12:48.032449899 +0000 UTC m=+1323.353257585" lastFinishedPulling="2025-12-01 07:12:55.236112888 +0000 UTC m=+1330.556920574" observedRunningTime="2025-12-01 07:12:56.157712686 +0000 UTC m=+1331.478520392" watchObservedRunningTime="2025-12-01 07:12:56.166524004 +0000 UTC m=+1331.487331690" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.126639 4822 generic.go:334] "Generic (PLEG): container finished" podID="b30c0ac5-fe50-4055-b46c-49928b99d337" containerID="6dbc8d38bd7d89b31489d0532e8c45abe57eb433a927646e26ebb4f7bc31c405" exitCode=0 Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.126677 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s65nh" event={"ID":"b30c0ac5-fe50-4055-b46c-49928b99d337","Type":"ContainerDied","Data":"6dbc8d38bd7d89b31489d0532e8c45abe57eb433a927646e26ebb4f7bc31c405"} Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.320908 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.320978 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.354742 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.355181 4822 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.402912 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.457084 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.457456 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.550843 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.607326 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6979\" (UniqueName: \"kubernetes.io/projected/7a55bca0-5d9e-4486-901b-829e001c1b16-kube-api-access-l6979\") pod \"7a55bca0-5d9e-4486-901b-829e001c1b16\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.607464 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-config-data\") pod \"7a55bca0-5d9e-4486-901b-829e001c1b16\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.607610 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-scripts\") pod \"7a55bca0-5d9e-4486-901b-829e001c1b16\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.607724 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-combined-ca-bundle\") pod \"7a55bca0-5d9e-4486-901b-829e001c1b16\" (UID: \"7a55bca0-5d9e-4486-901b-829e001c1b16\") " Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.615399 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a55bca0-5d9e-4486-901b-829e001c1b16-kube-api-access-l6979" (OuterVolumeSpecName: "kube-api-access-l6979") pod "7a55bca0-5d9e-4486-901b-829e001c1b16" (UID: "7a55bca0-5d9e-4486-901b-829e001c1b16"). InnerVolumeSpecName "kube-api-access-l6979". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.615836 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-scripts" (OuterVolumeSpecName: "scripts") pod "7a55bca0-5d9e-4486-901b-829e001c1b16" (UID: "7a55bca0-5d9e-4486-901b-829e001c1b16"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.636257 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a55bca0-5d9e-4486-901b-829e001c1b16" (UID: "7a55bca0-5d9e-4486-901b-829e001c1b16"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.650929 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-config-data" (OuterVolumeSpecName: "config-data") pod "7a55bca0-5d9e-4486-901b-829e001c1b16" (UID: "7a55bca0-5d9e-4486-901b-829e001c1b16"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.710917 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.711076 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.711136 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a55bca0-5d9e-4486-901b-829e001c1b16-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.711219 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6979\" (UniqueName: \"kubernetes.io/projected/7a55bca0-5d9e-4486-901b-829e001c1b16-kube-api-access-l6979\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.732442 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.763846 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.835416 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8ccb5c7cf-h7s98"] Dec 01 07:12:57 crc kubenswrapper[4822]: I1201 07:12:57.835674 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" podUID="149ce159-6c78-4062-8145-5aa08ff2f3fb" containerName="dnsmasq-dns" containerID="cri-o://3a1ad89ce942377a7fd0f9f31e663bc1c039f8c7a99ef0ea4a07765e450096f5" gracePeriod=10 Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.151693 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fqb49" event={"ID":"7a55bca0-5d9e-4486-901b-829e001c1b16","Type":"ContainerDied","Data":"3fee81a9f5e2e978b34c936f3ed04c90d8f82ca07a8141a28fbb36e3d2e46e3c"} Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.151961 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fee81a9f5e2e978b34c936f3ed04c90d8f82ca07a8141a28fbb36e3d2e46e3c" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.152031 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fqb49" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.160310 4822 generic.go:334] "Generic (PLEG): container finished" podID="149ce159-6c78-4062-8145-5aa08ff2f3fb" containerID="3a1ad89ce942377a7fd0f9f31e663bc1c039f8c7a99ef0ea4a07765e450096f5" exitCode=0 Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.161322 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" event={"ID":"149ce159-6c78-4062-8145-5aa08ff2f3fb","Type":"ContainerDied","Data":"3a1ad89ce942377a7fd0f9f31e663bc1c039f8c7a99ef0ea4a07765e450096f5"} Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.221302 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.255773 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.256091 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4c7e2928-19c6-4f6b-a697-179b6251219a" containerName="nova-api-log" containerID="cri-o://438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95" gracePeriod=30 Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.256241 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4c7e2928-19c6-4f6b-a697-179b6251219a" containerName="nova-api-api" containerID="cri-o://62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1" gracePeriod=30 Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.284917 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4c7e2928-19c6-4f6b-a697-179b6251219a" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": EOF" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.285321 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4c7e2928-19c6-4f6b-a697-179b6251219a" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": EOF" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.289664 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.337814 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.439345 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-dns-swift-storage-0\") pod \"149ce159-6c78-4062-8145-5aa08ff2f3fb\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.439399 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-dns-svc\") pod \"149ce159-6c78-4062-8145-5aa08ff2f3fb\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.439422 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-ovsdbserver-sb\") pod \"149ce159-6c78-4062-8145-5aa08ff2f3fb\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.439461 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-ovsdbserver-nb\") pod \"149ce159-6c78-4062-8145-5aa08ff2f3fb\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.439506 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-config\") pod \"149ce159-6c78-4062-8145-5aa08ff2f3fb\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.439592 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlnn6\" (UniqueName: \"kubernetes.io/projected/149ce159-6c78-4062-8145-5aa08ff2f3fb-kube-api-access-jlnn6\") pod \"149ce159-6c78-4062-8145-5aa08ff2f3fb\" (UID: \"149ce159-6c78-4062-8145-5aa08ff2f3fb\") " Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.473693 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/149ce159-6c78-4062-8145-5aa08ff2f3fb-kube-api-access-jlnn6" (OuterVolumeSpecName: "kube-api-access-jlnn6") pod "149ce159-6c78-4062-8145-5aa08ff2f3fb" (UID: "149ce159-6c78-4062-8145-5aa08ff2f3fb"). InnerVolumeSpecName "kube-api-access-jlnn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.530616 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-config" (OuterVolumeSpecName: "config") pod "149ce159-6c78-4062-8145-5aa08ff2f3fb" (UID: "149ce159-6c78-4062-8145-5aa08ff2f3fb"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.541507 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.541534 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlnn6\" (UniqueName: \"kubernetes.io/projected/149ce159-6c78-4062-8145-5aa08ff2f3fb-kube-api-access-jlnn6\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.542057 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "149ce159-6c78-4062-8145-5aa08ff2f3fb" (UID: "149ce159-6c78-4062-8145-5aa08ff2f3fb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.561125 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "149ce159-6c78-4062-8145-5aa08ff2f3fb" (UID: "149ce159-6c78-4062-8145-5aa08ff2f3fb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.593175 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "149ce159-6c78-4062-8145-5aa08ff2f3fb" (UID: "149ce159-6c78-4062-8145-5aa08ff2f3fb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.606152 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "149ce159-6c78-4062-8145-5aa08ff2f3fb" (UID: "149ce159-6c78-4062-8145-5aa08ff2f3fb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.652064 4822 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.652104 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.652114 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.652122 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/149ce159-6c78-4062-8145-5aa08ff2f3fb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.676931 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.753717 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zldjl\" (UniqueName: \"kubernetes.io/projected/b30c0ac5-fe50-4055-b46c-49928b99d337-kube-api-access-zldjl\") pod \"b30c0ac5-fe50-4055-b46c-49928b99d337\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.753839 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-config-data\") pod \"b30c0ac5-fe50-4055-b46c-49928b99d337\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.753913 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-scripts\") pod \"b30c0ac5-fe50-4055-b46c-49928b99d337\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.755391 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-combined-ca-bundle\") pod \"b30c0ac5-fe50-4055-b46c-49928b99d337\" (UID: \"b30c0ac5-fe50-4055-b46c-49928b99d337\") " Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.757583 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b30c0ac5-fe50-4055-b46c-49928b99d337-kube-api-access-zldjl" (OuterVolumeSpecName: "kube-api-access-zldjl") pod "b30c0ac5-fe50-4055-b46c-49928b99d337" (UID: "b30c0ac5-fe50-4055-b46c-49928b99d337"). InnerVolumeSpecName "kube-api-access-zldjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.764301 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-scripts" (OuterVolumeSpecName: "scripts") pod "b30c0ac5-fe50-4055-b46c-49928b99d337" (UID: "b30c0ac5-fe50-4055-b46c-49928b99d337"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.782727 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b30c0ac5-fe50-4055-b46c-49928b99d337" (UID: "b30c0ac5-fe50-4055-b46c-49928b99d337"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.783819 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-config-data" (OuterVolumeSpecName: "config-data") pod "b30c0ac5-fe50-4055-b46c-49928b99d337" (UID: "b30c0ac5-fe50-4055-b46c-49928b99d337"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.857489 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.857519 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.857530 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zldjl\" (UniqueName: \"kubernetes.io/projected/b30c0ac5-fe50-4055-b46c-49928b99d337-kube-api-access-zldjl\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:58 crc kubenswrapper[4822]: I1201 07:12:58.857541 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b30c0ac5-fe50-4055-b46c-49928b99d337-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.175855 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s65nh" event={"ID":"b30c0ac5-fe50-4055-b46c-49928b99d337","Type":"ContainerDied","Data":"b4e6c5b121170cc82aa94a4280a2d23bd645522b7b584b96e26d084bdfc734dd"} Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.175897 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4e6c5b121170cc82aa94a4280a2d23bd645522b7b584b96e26d084bdfc734dd" Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.175872 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s65nh" Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.178678 4822 generic.go:334] "Generic (PLEG): container finished" podID="4c7e2928-19c6-4f6b-a697-179b6251219a" containerID="438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95" exitCode=143 Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.178754 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4c7e2928-19c6-4f6b-a697-179b6251219a","Type":"ContainerDied","Data":"438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95"} Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.187421 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98" event={"ID":"149ce159-6c78-4062-8145-5aa08ff2f3fb","Type":"ContainerDied","Data":"0276a0de51fd2504956dbf7221f76fe6d60bd7f5410aa26f78fdb81073235634"} Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.187488 4822 scope.go:117] "RemoveContainer" containerID="3a1ad89ce942377a7fd0f9f31e663bc1c039f8c7a99ef0ea4a07765e450096f5" Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.187709 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8ccb5c7cf-h7s98"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.221616 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8ccb5c7cf-h7s98"]
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.267935 4822 scope.go:117] "RemoveContainer" containerID="d0c017e15441d391d197b6e125bdf48aa979b07e3ae258a578511b6ca7160f47"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.298656 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8ccb5c7cf-h7s98"]
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.326456 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 01 07:12:59 crc kubenswrapper[4822]: E1201 07:12:59.326920 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="149ce159-6c78-4062-8145-5aa08ff2f3fb" containerName="init"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.326943 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="149ce159-6c78-4062-8145-5aa08ff2f3fb" containerName="init"
Dec 01 07:12:59 crc kubenswrapper[4822]: E1201 07:12:59.326958 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="149ce159-6c78-4062-8145-5aa08ff2f3fb" containerName="dnsmasq-dns"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.326964 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="149ce159-6c78-4062-8145-5aa08ff2f3fb" containerName="dnsmasq-dns"
Dec 01 07:12:59 crc kubenswrapper[4822]: E1201 07:12:59.326998 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b30c0ac5-fe50-4055-b46c-49928b99d337" containerName="nova-cell1-conductor-db-sync"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.327005 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b30c0ac5-fe50-4055-b46c-49928b99d337" containerName="nova-cell1-conductor-db-sync"
Dec 01 07:12:59 crc kubenswrapper[4822]: E1201 07:12:59.327021 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a55bca0-5d9e-4486-901b-829e001c1b16" containerName="nova-manage"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.327027 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a55bca0-5d9e-4486-901b-829e001c1b16" containerName="nova-manage"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.327188 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a55bca0-5d9e-4486-901b-829e001c1b16" containerName="nova-manage"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.327206 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b30c0ac5-fe50-4055-b46c-49928b99d337" containerName="nova-cell1-conductor-db-sync"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.327214 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="149ce159-6c78-4062-8145-5aa08ff2f3fb" containerName="dnsmasq-dns"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.327870 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.336827 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.340306 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.477622 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46169b3f-3e1f-4601-a82e-f3ea1bdde003-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.478053 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bm4g\" (UniqueName: \"kubernetes.io/projected/46169b3f-3e1f-4601-a82e-f3ea1bdde003-kube-api-access-7bm4g\") pod \"nova-cell1-conductor-0\" (UID: \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.478178 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46169b3f-3e1f-4601-a82e-f3ea1bdde003-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.580074 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bm4g\" (UniqueName: \"kubernetes.io/projected/46169b3f-3e1f-4601-a82e-f3ea1bdde003-kube-api-access-7bm4g\") pod \"nova-cell1-conductor-0\" (UID: \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.580164 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46169b3f-3e1f-4601-a82e-f3ea1bdde003-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.580193 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46169b3f-3e1f-4601-a82e-f3ea1bdde003-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.585409 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46169b3f-3e1f-4601-a82e-f3ea1bdde003-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.585885 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46169b3f-3e1f-4601-a82e-f3ea1bdde003-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.616481 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bm4g\" (UniqueName: \"kubernetes.io/projected/46169b3f-3e1f-4601-a82e-f3ea1bdde003-kube-api-access-7bm4g\") pod \"nova-cell1-conductor-0\" (UID: \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 07:12:59 crc kubenswrapper[4822]: I1201 07:12:59.658292 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 01 07:13:00 crc kubenswrapper[4822]: I1201 07:13:00.157973 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 01 07:13:00 crc kubenswrapper[4822]: W1201 07:13:00.161540 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46169b3f_3e1f_4601_a82e_f3ea1bdde003.slice/crio-5022f3515428abc8ff3b3967e69b03fc5baecda0100975c5f849a1c02c12bed7 WatchSource:0}: Error finding container 5022f3515428abc8ff3b3967e69b03fc5baecda0100975c5f849a1c02c12bed7: Status 404 returned error can't find the container with id 5022f3515428abc8ff3b3967e69b03fc5baecda0100975c5f849a1c02c12bed7
Dec 01 07:13:00 crc kubenswrapper[4822]: I1201 07:13:00.198740 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"46169b3f-3e1f-4601-a82e-f3ea1bdde003","Type":"ContainerStarted","Data":"5022f3515428abc8ff3b3967e69b03fc5baecda0100975c5f849a1c02c12bed7"}
Dec 01 07:13:00 crc kubenswrapper[4822]: I1201 07:13:00.198881 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="22a864cf-abdf-4cb9-9af4-61afede19cbf" containerName="nova-scheduler-scheduler" containerID="cri-o://b71c48e5426a382940701c02c58cb6292249ab997e440e24cba779a5e42d82c4" gracePeriod=30
Dec 01 07:13:00 crc kubenswrapper[4822]: I1201 07:13:00.963038 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="149ce159-6c78-4062-8145-5aa08ff2f3fb" path="/var/lib/kubelet/pods/149ce159-6c78-4062-8145-5aa08ff2f3fb/volumes"
Dec 01 07:13:01 crc kubenswrapper[4822]: I1201 07:13:01.212836 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"46169b3f-3e1f-4601-a82e-f3ea1bdde003","Type":"ContainerStarted","Data":"8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066"}
Dec 01 07:13:01 crc kubenswrapper[4822]: I1201 07:13:01.213156 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Dec 01 07:13:01 crc kubenswrapper[4822]: I1201 07:13:01.235955 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.235936262 podStartE2EDuration="2.235936262s" podCreationTimestamp="2025-12-01 07:12:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:13:01.23125661 +0000 UTC m=+1336.552064306" watchObservedRunningTime="2025-12-01 07:13:01.235936262 +0000 UTC m=+1336.556743948"
Dec 01 07:13:02 crc kubenswrapper[4822]: E1201 07:13:02.354514 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b71c48e5426a382940701c02c58cb6292249ab997e440e24cba779a5e42d82c4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 01 07:13:02 crc kubenswrapper[4822]: E1201 07:13:02.356619 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b71c48e5426a382940701c02c58cb6292249ab997e440e24cba779a5e42d82c4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 01 07:13:02 crc kubenswrapper[4822]: E1201 07:13:02.358218 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b71c48e5426a382940701c02c58cb6292249ab997e440e24cba779a5e42d82c4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 01 07:13:02 crc kubenswrapper[4822]: E1201 07:13:02.358281 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="22a864cf-abdf-4cb9-9af4-61afede19cbf" containerName="nova-scheduler-scheduler"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.182984 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.271183 4822 generic.go:334] "Generic (PLEG): container finished" podID="4c7e2928-19c6-4f6b-a697-179b6251219a" containerID="62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1" exitCode=0
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.271261 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.271284 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4c7e2928-19c6-4f6b-a697-179b6251219a","Type":"ContainerDied","Data":"62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1"}
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.271607 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4c7e2928-19c6-4f6b-a697-179b6251219a","Type":"ContainerDied","Data":"9fe9c6f11a20b590e998c04132f83202ce0ff1455bf66f25449443d223315bd8"}
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.271630 4822 scope.go:117] "RemoveContainer" containerID="62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.275247 4822 generic.go:334] "Generic (PLEG): container finished" podID="22a864cf-abdf-4cb9-9af4-61afede19cbf" containerID="b71c48e5426a382940701c02c58cb6292249ab997e440e24cba779a5e42d82c4" exitCode=0
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.275314 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"22a864cf-abdf-4cb9-9af4-61afede19cbf","Type":"ContainerDied","Data":"b71c48e5426a382940701c02c58cb6292249ab997e440e24cba779a5e42d82c4"}
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.281076 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57rr7\" (UniqueName: \"kubernetes.io/projected/4c7e2928-19c6-4f6b-a697-179b6251219a-kube-api-access-57rr7\") pod \"4c7e2928-19c6-4f6b-a697-179b6251219a\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") "
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.281184 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c7e2928-19c6-4f6b-a697-179b6251219a-combined-ca-bundle\") pod \"4c7e2928-19c6-4f6b-a697-179b6251219a\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") "
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.281283 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c7e2928-19c6-4f6b-a697-179b6251219a-config-data\") pod \"4c7e2928-19c6-4f6b-a697-179b6251219a\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") "
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.281356 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c7e2928-19c6-4f6b-a697-179b6251219a-logs\") pod \"4c7e2928-19c6-4f6b-a697-179b6251219a\" (UID: \"4c7e2928-19c6-4f6b-a697-179b6251219a\") "
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.282512 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c7e2928-19c6-4f6b-a697-179b6251219a-logs" (OuterVolumeSpecName: "logs") pod "4c7e2928-19c6-4f6b-a697-179b6251219a" (UID: "4c7e2928-19c6-4f6b-a697-179b6251219a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.288541 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c7e2928-19c6-4f6b-a697-179b6251219a-kube-api-access-57rr7" (OuterVolumeSpecName: "kube-api-access-57rr7") pod "4c7e2928-19c6-4f6b-a697-179b6251219a" (UID: "4c7e2928-19c6-4f6b-a697-179b6251219a"). InnerVolumeSpecName "kube-api-access-57rr7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.306314 4822 scope.go:117] "RemoveContainer" containerID="438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.320686 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c7e2928-19c6-4f6b-a697-179b6251219a-config-data" (OuterVolumeSpecName: "config-data") pod "4c7e2928-19c6-4f6b-a697-179b6251219a" (UID: "4c7e2928-19c6-4f6b-a697-179b6251219a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.346013 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c7e2928-19c6-4f6b-a697-179b6251219a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c7e2928-19c6-4f6b-a697-179b6251219a" (UID: "4c7e2928-19c6-4f6b-a697-179b6251219a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.383448 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c7e2928-19c6-4f6b-a697-179b6251219a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.383473 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c7e2928-19c6-4f6b-a697-179b6251219a-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.383483 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c7e2928-19c6-4f6b-a697-179b6251219a-logs\") on node \"crc\" DevicePath \"\""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.383492 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57rr7\" (UniqueName: \"kubernetes.io/projected/4c7e2928-19c6-4f6b-a697-179b6251219a-kube-api-access-57rr7\") on node \"crc\" DevicePath \"\""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.448999 4822 scope.go:117] "RemoveContainer" containerID="62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1"
Dec 01 07:13:04 crc kubenswrapper[4822]: E1201 07:13:04.449666 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1\": container with ID starting with 62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1 not found: ID does not exist" containerID="62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.449709 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1"} err="failed to get container status \"62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1\": rpc error: code = NotFound desc = could not find container \"62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1\": container with ID starting with 62c74d0c9bb2683460496060df6840bcb97d19037fa2a03153c3cbb10e246fb1 not found: ID does not exist"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.449736 4822 scope.go:117] "RemoveContainer" containerID="438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95"
Dec 01 07:13:04 crc kubenswrapper[4822]: E1201 07:13:04.450165 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95\": container with ID starting with 438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95 not found: ID does not exist" containerID="438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.450195 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95"} err="failed to get container status \"438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95\": rpc error: code = NotFound desc = could not find container \"438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95\": container with ID starting with 438035d0a9524f623ce2ac376db3a94e279fd8a20e1e8161cdf7534c5cb7da95 not found: ID does not exist"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.463611 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.587261 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22a864cf-abdf-4cb9-9af4-61afede19cbf-config-data\") pod \"22a864cf-abdf-4cb9-9af4-61afede19cbf\" (UID: \"22a864cf-abdf-4cb9-9af4-61afede19cbf\") "
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.587374 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76fkz\" (UniqueName: \"kubernetes.io/projected/22a864cf-abdf-4cb9-9af4-61afede19cbf-kube-api-access-76fkz\") pod \"22a864cf-abdf-4cb9-9af4-61afede19cbf\" (UID: \"22a864cf-abdf-4cb9-9af4-61afede19cbf\") "
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.587531 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22a864cf-abdf-4cb9-9af4-61afede19cbf-combined-ca-bundle\") pod \"22a864cf-abdf-4cb9-9af4-61afede19cbf\" (UID: \"22a864cf-abdf-4cb9-9af4-61afede19cbf\") "
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.590703 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22a864cf-abdf-4cb9-9af4-61afede19cbf-kube-api-access-76fkz" (OuterVolumeSpecName: "kube-api-access-76fkz") pod "22a864cf-abdf-4cb9-9af4-61afede19cbf" (UID: "22a864cf-abdf-4cb9-9af4-61afede19cbf"). InnerVolumeSpecName "kube-api-access-76fkz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.611421 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22a864cf-abdf-4cb9-9af4-61afede19cbf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22a864cf-abdf-4cb9-9af4-61afede19cbf" (UID: "22a864cf-abdf-4cb9-9af4-61afede19cbf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.613023 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.640630 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.642239 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22a864cf-abdf-4cb9-9af4-61afede19cbf-config-data" (OuterVolumeSpecName: "config-data") pod "22a864cf-abdf-4cb9-9af4-61afede19cbf" (UID: "22a864cf-abdf-4cb9-9af4-61afede19cbf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.658316 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 01 07:13:04 crc kubenswrapper[4822]: E1201 07:13:04.658792 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22a864cf-abdf-4cb9-9af4-61afede19cbf" containerName="nova-scheduler-scheduler"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.658815 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="22a864cf-abdf-4cb9-9af4-61afede19cbf" containerName="nova-scheduler-scheduler"
Dec 01 07:13:04 crc kubenswrapper[4822]: E1201 07:13:04.658846 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c7e2928-19c6-4f6b-a697-179b6251219a" containerName="nova-api-api"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.658855 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c7e2928-19c6-4f6b-a697-179b6251219a" containerName="nova-api-api"
Dec 01 07:13:04 crc kubenswrapper[4822]: E1201 07:13:04.658874 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c7e2928-19c6-4f6b-a697-179b6251219a" containerName="nova-api-log"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.658883 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c7e2928-19c6-4f6b-a697-179b6251219a" containerName="nova-api-log"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.659150 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c7e2928-19c6-4f6b-a697-179b6251219a" containerName="nova-api-log"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.659177 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c7e2928-19c6-4f6b-a697-179b6251219a" containerName="nova-api-api"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.659198 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="22a864cf-abdf-4cb9-9af4-61afede19cbf" containerName="nova-scheduler-scheduler"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.660477 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.663037 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.670515 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.689251 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22a864cf-abdf-4cb9-9af4-61afede19cbf-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.689285 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76fkz\" (UniqueName: \"kubernetes.io/projected/22a864cf-abdf-4cb9-9af4-61afede19cbf-kube-api-access-76fkz\") on node \"crc\" DevicePath \"\""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.689301 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22a864cf-abdf-4cb9-9af4-61afede19cbf-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.790681 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.790781 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-config-data\") pod \"nova-api-0\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.790843 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-logs\") pod \"nova-api-0\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.791002 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6djz\" (UniqueName: \"kubernetes.io/projected/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-kube-api-access-d6djz\") pod \"nova-api-0\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.893028 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-config-data\") pod \"nova-api-0\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.893105 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-logs\") pod \"nova-api-0\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.893156 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6djz\" (UniqueName: \"kubernetes.io/projected/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-kube-api-access-d6djz\") pod \"nova-api-0\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.893281 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.893718 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-logs\") pod \"nova-api-0\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.897856 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.898387 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-config-data\") pod \"nova-api-0\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.909021 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6djz\" (UniqueName: \"kubernetes.io/projected/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-kube-api-access-d6djz\") pod \"nova-api-0\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " pod="openstack/nova-api-0"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.965740 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c7e2928-19c6-4f6b-a697-179b6251219a" path="/var/lib/kubelet/pods/4c7e2928-19c6-4f6b-a697-179b6251219a/volumes"
Dec 01 07:13:04 crc kubenswrapper[4822]: I1201 07:13:04.986927 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.286646 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.286837 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"22a864cf-abdf-4cb9-9af4-61afede19cbf","Type":"ContainerDied","Data":"ee4256ee2f7284d4f0f4d24f67f354cd5f2b69a6425aab847882f9dd8332531a"}
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.286903 4822 scope.go:117] "RemoveContainer" containerID="b71c48e5426a382940701c02c58cb6292249ab997e440e24cba779a5e42d82c4"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.319607 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.334526 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.347720 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.349702 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.352952 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.371541 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.420722 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\") " pod="openstack/nova-scheduler-0"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.420874 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbg26\" (UniqueName: \"kubernetes.io/projected/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-kube-api-access-nbg26\") pod \"nova-scheduler-0\" (UID: \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\") " pod="openstack/nova-scheduler-0"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.421138 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-config-data\") pod \"nova-scheduler-0\" (UID: \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\") " pod="openstack/nova-scheduler-0"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.477536 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 01 07:13:05 crc kubenswrapper[4822]: W1201 07:13:05.484705 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c5d992e_2bd7_411e_ba2d_d623d09baf3c.slice/crio-a1c2497c58a2e997cce2e277c71a776265aaabe06bf665808585efc57eb087ba WatchSource:0}: Error finding container a1c2497c58a2e997cce2e277c71a776265aaabe06bf665808585efc57eb087ba: Status 404 returned error can't find the container with id a1c2497c58a2e997cce2e277c71a776265aaabe06bf665808585efc57eb087ba
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.524122 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\") " pod="openstack/nova-scheduler-0"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.524242 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbg26\" (UniqueName: \"kubernetes.io/projected/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-kube-api-access-nbg26\") pod \"nova-scheduler-0\" (UID: \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\") " pod="openstack/nova-scheduler-0"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.524445 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-config-data\") pod \"nova-scheduler-0\" (UID: \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\") " pod="openstack/nova-scheduler-0"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.530583 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\") " pod="openstack/nova-scheduler-0"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.530773 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-config-data\") pod \"nova-scheduler-0\" (UID: \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\") " pod="openstack/nova-scheduler-0"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.539916 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbg26\" (UniqueName: \"kubernetes.io/projected/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-kube-api-access-nbg26\") pod \"nova-scheduler-0\" (UID: \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\") " pod="openstack/nova-scheduler-0"
Dec 01 07:13:05 crc kubenswrapper[4822]: I1201 07:13:05.684155 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 07:13:06 crc kubenswrapper[4822]: I1201 07:13:06.222533 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 07:13:06 crc kubenswrapper[4822]: W1201 07:13:06.230003 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04a71e46_fd9e_4a81_a06a_6e6ca2e84a3a.slice/crio-2624602a1cf742b117dc24afe7b2fe0462ef863e497e4c3648eaf714effe1840 WatchSource:0}: Error finding container 2624602a1cf742b117dc24afe7b2fe0462ef863e497e4c3648eaf714effe1840: Status 404 returned error can't find the container with id 2624602a1cf742b117dc24afe7b2fe0462ef863e497e4c3648eaf714effe1840
Dec 01 07:13:06 crc kubenswrapper[4822]: I1201 07:13:06.302243 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3c5d992e-2bd7-411e-ba2d-d623d09baf3c","Type":"ContainerStarted","Data":"7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da"}
Dec 01 07:13:06 crc kubenswrapper[4822]: I1201 07:13:06.302375 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3c5d992e-2bd7-411e-ba2d-d623d09baf3c","Type":"ContainerStarted","Data":"983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9"}
Dec 01 07:13:06 crc kubenswrapper[4822]: I1201 07:13:06.302393 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3c5d992e-2bd7-411e-ba2d-d623d09baf3c","Type":"ContainerStarted","Data":"a1c2497c58a2e997cce2e277c71a776265aaabe06bf665808585efc57eb087ba"}
Dec 01 07:13:06 crc kubenswrapper[4822]: I1201 07:13:06.309684 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a","Type":"ContainerStarted","Data":"2624602a1cf742b117dc24afe7b2fe0462ef863e497e4c3648eaf714effe1840"}
Dec 01 07:13:06 crc kubenswrapper[4822]: I1201 07:13:06.347946 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.347927417 podStartE2EDuration="2.347927417s" podCreationTimestamp="2025-12-01 07:13:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:13:06.343623326 +0000 UTC m=+1341.664431042" watchObservedRunningTime="2025-12-01 07:13:06.347927417 +0000 UTC m=+1341.668735113"
Dec 01 07:13:06 crc kubenswrapper[4822]: I1201 07:13:06.965017 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22a864cf-abdf-4cb9-9af4-61afede19cbf" path="/var/lib/kubelet/pods/22a864cf-abdf-4cb9-9af4-61afede19cbf/volumes"
Dec 01 07:13:07 crc kubenswrapper[4822]: I1201 07:13:07.341902 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a","Type":"ContainerStarted","Data":"42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58"}
Dec 01 07:13:07 crc kubenswrapper[4822]: I1201 07:13:07.380924 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.380893867 podStartE2EDuration="2.380893867s" podCreationTimestamp="2025-12-01 07:13:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:13:07.364070814 +0000 UTC m=+1342.684878540" watchObservedRunningTime="2025-12-01 07:13:07.380893867 +0000 UTC m=+1342.701701593"
Dec 01 07:13:09 crc kubenswrapper[4822]: I1201 07:13:09.712248 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Dec 01 07:13:10 crc kubenswrapper[4822]: I1201 07:13:10.685078 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 01 07:13:12 crc kubenswrapper[4822]: I1201 07:13:12.542820 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 07:13:12 crc kubenswrapper[4822]: I1201 07:13:12.542872 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 07:13:12 crc kubenswrapper[4822]: I1201 07:13:12.542914 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64"
Dec 01 07:13:12 crc kubenswrapper[4822]: I1201 07:13:12.543615 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cb68b029768127b77693597ceb7a762b8c61f40a1a25b9306ea20cdd9dcb63b5"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 07:13:12 crc kubenswrapper[4822]: I1201 07:13:12.543662 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://cb68b029768127b77693597ceb7a762b8c61f40a1a25b9306ea20cdd9dcb63b5" gracePeriod=600
Dec 01 07:13:13 crc kubenswrapper[4822]: I1201 07:13:13.411838 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="cb68b029768127b77693597ceb7a762b8c61f40a1a25b9306ea20cdd9dcb63b5" exitCode=0
Dec 01 07:13:13 crc kubenswrapper[4822]: I1201 07:13:13.411938 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"cb68b029768127b77693597ceb7a762b8c61f40a1a25b9306ea20cdd9dcb63b5"}
Dec 01 07:13:13 crc kubenswrapper[4822]: I1201 07:13:13.412609 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272"}
Dec 01 07:13:13 crc kubenswrapper[4822]: I1201 07:13:13.412636 4822 scope.go:117] "RemoveContainer" containerID="9f465aacecd8042506a98ec45b2a943f80875679afbe9996cf6b8ba8198e3f7e"
Dec 01 07:13:14 crc kubenswrapper[4822]: I1201 07:13:14.988051 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 01 07:13:14 crc kubenswrapper[4822]: I1201 07:13:14.988405 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 01 07:13:15 crc kubenswrapper[4822]: I1201 07:13:15.685101 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 01 07:13:15 crc kubenswrapper[4822]: I1201 07:13:15.738263 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 01 07:13:16 crc kubenswrapper[4822]: I1201 07:13:16.070828 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 01 07:13:16 crc kubenswrapper[4822]: I1201 07:13:16.070854 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 01 07:13:16 crc kubenswrapper[4822]: I1201 07:13:16.497732 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 01 07:13:17 crc kubenswrapper[4822]: I1201 07:13:17.335107 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 01 07:13:21 crc kubenswrapper[4822]: I1201 07:13:21.134853 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 01 07:13:21 crc kubenswrapper[4822]: I1201 07:13:21.135569 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="47ea8f0f-c6c9-412a-b158-878fe3107ed5" containerName="kube-state-metrics" containerID="cri-o://33e9da4f4f59b718ecc9296f695fb4aadf4f7deb003919c6c2aeb2a3222f2d2e" gracePeriod=30
Dec 01 07:13:21 crc kubenswrapper[4822]: I1201 07:13:21.511688 4822 generic.go:334] "Generic (PLEG): container finished" podID="47ea8f0f-c6c9-412a-b158-878fe3107ed5" containerID="33e9da4f4f59b718ecc9296f695fb4aadf4f7deb003919c6c2aeb2a3222f2d2e" exitCode=2
Dec 01 07:13:21 crc kubenswrapper[4822]: I1201 07:13:21.511870 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"47ea8f0f-c6c9-412a-b158-878fe3107ed5","Type":"ContainerDied","Data":"33e9da4f4f59b718ecc9296f695fb4aadf4f7deb003919c6c2aeb2a3222f2d2e"}
Dec 01 07:13:21 crc kubenswrapper[4822]: I1201 07:13:21.607401 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 01 07:13:21 crc kubenswrapper[4822]: I1201 07:13:21.701818 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-st4b9\" (UniqueName: \"kubernetes.io/projected/47ea8f0f-c6c9-412a-b158-878fe3107ed5-kube-api-access-st4b9\") pod \"47ea8f0f-c6c9-412a-b158-878fe3107ed5\" (UID: \"47ea8f0f-c6c9-412a-b158-878fe3107ed5\") "
Dec 01 07:13:21 crc kubenswrapper[4822]: I1201 07:13:21.722885 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47ea8f0f-c6c9-412a-b158-878fe3107ed5-kube-api-access-st4b9" (OuterVolumeSpecName: "kube-api-access-st4b9") pod "47ea8f0f-c6c9-412a-b158-878fe3107ed5" (UID: "47ea8f0f-c6c9-412a-b158-878fe3107ed5"). InnerVolumeSpecName "kube-api-access-st4b9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:13:21 crc kubenswrapper[4822]: I1201 07:13:21.805732 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-st4b9\" (UniqueName: \"kubernetes.io/projected/47ea8f0f-c6c9-412a-b158-878fe3107ed5-kube-api-access-st4b9\") on node \"crc\" DevicePath \"\""
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.523506 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"47ea8f0f-c6c9-412a-b158-878fe3107ed5","Type":"ContainerDied","Data":"3422bd0ef123c42d4c51d085787aad448cc12613ee444aa5a0f2186e69f21990"}
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.523712 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.525712 4822 scope.go:117] "RemoveContainer" containerID="33e9da4f4f59b718ecc9296f695fb4aadf4f7deb003919c6c2aeb2a3222f2d2e"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.594148 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.613619 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.625596 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 01 07:13:22 crc kubenswrapper[4822]: E1201 07:13:22.626069 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47ea8f0f-c6c9-412a-b158-878fe3107ed5" containerName="kube-state-metrics"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.626091 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="47ea8f0f-c6c9-412a-b158-878fe3107ed5" containerName="kube-state-metrics"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.626264 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="47ea8f0f-c6c9-412a-b158-878fe3107ed5" containerName="kube-state-metrics"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.626879 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.629265 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.629751 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.636923 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.721478 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.721540 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.721760 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgk5v\" (UniqueName: \"kubernetes.io/projected/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-api-access-pgk5v\") pod \"kube-state-metrics-0\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.721978 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.823522 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.823612 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.823699 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgk5v\" (UniqueName: \"kubernetes.io/projected/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-api-access-pgk5v\") pod \"kube-state-metrics-0\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.824295 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.829719 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.830316 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.832292 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.846812 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgk5v\" (UniqueName: \"kubernetes.io/projected/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-api-access-pgk5v\") pod \"kube-state-metrics-0\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.943068 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 01 07:13:22 crc kubenswrapper[4822]: I1201 07:13:22.961139 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47ea8f0f-c6c9-412a-b158-878fe3107ed5" path="/var/lib/kubelet/pods/47ea8f0f-c6c9-412a-b158-878fe3107ed5/volumes"
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.063138 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.063428 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="ceilometer-central-agent" containerID="cri-o://379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381" gracePeriod=30
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.064301 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="proxy-httpd" containerID="cri-o://b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6" gracePeriod=30
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.064372 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="sg-core" containerID="cri-o://9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555" gracePeriod=30
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.064423 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="ceilometer-notification-agent" containerID="cri-o://5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db" gracePeriod=30
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.442233 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 01 07:13:23 crc kubenswrapper[4822]: W1201 07:13:23.451418 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e079db6_a9e0_464b_b99d_57887190a5b8.slice/crio-a1f2ee5ca2f9abf0bcfd5525c4498f0a6a3574e349cecda3177d2c3bce0d4e67 WatchSource:0}: Error finding container a1f2ee5ca2f9abf0bcfd5525c4498f0a6a3574e349cecda3177d2c3bce0d4e67: Status 404 returned error can't find the container with id a1f2ee5ca2f9abf0bcfd5525c4498f0a6a3574e349cecda3177d2c3bce0d4e67
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.466133 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.536358 4822 generic.go:334] "Generic (PLEG): container finished" podID="dc2522aa-c494-4b9a-a756-cb763cd85d19" containerID="3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e" exitCode=137
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.536428 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dc2522aa-c494-4b9a-a756-cb763cd85d19","Type":"ContainerDied","Data":"3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e"}
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.536476 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.536532 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dc2522aa-c494-4b9a-a756-cb763cd85d19","Type":"ContainerDied","Data":"f62cd0b2929ded22ac155741f36420350d80e74033217210813018cd17d3c12b"}
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.536565 4822 scope.go:117] "RemoveContainer" containerID="3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e"
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.539614 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8e079db6-a9e0-464b-b99d-57887190a5b8","Type":"ContainerStarted","Data":"a1f2ee5ca2f9abf0bcfd5525c4498f0a6a3574e349cecda3177d2c3bce0d4e67"}
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.541718 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.544589 4822 generic.go:334] "Generic (PLEG): container finished" podID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerID="b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6" exitCode=0
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.544621 4822 generic.go:334] "Generic (PLEG): container finished" podID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerID="9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555" exitCode=2
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.544629 4822 generic.go:334] "Generic (PLEG): container finished" podID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerID="379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381" exitCode=0
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.544671 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8bce372-5a61-4aae-84a8-eac76a3e3cc8","Type":"ContainerDied","Data":"b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6"}
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.544707 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8bce372-5a61-4aae-84a8-eac76a3e3cc8","Type":"ContainerDied","Data":"9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555"}
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.544717 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8bce372-5a61-4aae-84a8-eac76a3e3cc8","Type":"ContainerDied","Data":"379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381"}
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.547969 4822 generic.go:334] "Generic (PLEG): container finished" podID="4c74c45e-c6e3-4cf8-8202-da93ad73131c" containerID="84140cd9805af6c96d907476731a1fa2f264694b115aeb7393dfaadaf128d26b" exitCode=137
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.548003 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"4c74c45e-c6e3-4cf8-8202-da93ad73131c","Type":"ContainerDied","Data":"84140cd9805af6c96d907476731a1fa2f264694b115aeb7393dfaadaf128d26b"}
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.548033 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.561943 4822 scope.go:117] "RemoveContainer" containerID="2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b"
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.598011 4822 scope.go:117] "RemoveContainer" containerID="3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e"
Dec 01 07:13:23 crc kubenswrapper[4822]: E1201 07:13:23.598433 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e\": container with ID starting with 3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e not found: ID does not exist" containerID="3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e"
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.598478 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e"} err="failed to get container status \"3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e\": rpc error: code = NotFound desc = could not find container \"3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e\": container with ID starting with 3cfa2b21cc9ed3a4022c511929bd4870c17eee172993605ef145b7f01844828e not found: ID does not exist"
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.598505 4822 scope.go:117] "RemoveContainer" containerID="2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b"
Dec 01 07:13:23 crc kubenswrapper[4822]: E1201 07:13:23.598804 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b\": container with ID starting with 2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b not found: ID does not exist" containerID="2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b"
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.598835 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b"} err="failed to get container status \"2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b\": rpc error: code = NotFound desc = could not find container \"2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b\": container with ID starting with 2d75e33d29ef465e71f528888dd2786c9e1ac51fd3fd35846e0113cb4024fb9b not found: ID does not exist"
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.598855 4822 scope.go:117] "RemoveContainer" containerID="84140cd9805af6c96d907476731a1fa2f264694b115aeb7393dfaadaf128d26b"
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.638073 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc2522aa-c494-4b9a-a756-cb763cd85d19-config-data\") pod \"dc2522aa-c494-4b9a-a756-cb763cd85d19\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") "
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.638171 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzsvh\" (UniqueName: \"kubernetes.io/projected/dc2522aa-c494-4b9a-a756-cb763cd85d19-kube-api-access-wzsvh\") pod \"dc2522aa-c494-4b9a-a756-cb763cd85d19\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") "
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.638314 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc2522aa-c494-4b9a-a756-cb763cd85d19-combined-ca-bundle\") pod \"dc2522aa-c494-4b9a-a756-cb763cd85d19\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") "
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.638347 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc2522aa-c494-4b9a-a756-cb763cd85d19-logs\") pod \"dc2522aa-c494-4b9a-a756-cb763cd85d19\" (UID: \"dc2522aa-c494-4b9a-a756-cb763cd85d19\") "
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.639334 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc2522aa-c494-4b9a-a756-cb763cd85d19-logs" (OuterVolumeSpecName: "logs") pod "dc2522aa-c494-4b9a-a756-cb763cd85d19" (UID: "dc2522aa-c494-4b9a-a756-cb763cd85d19"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.645340 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc2522aa-c494-4b9a-a756-cb763cd85d19-kube-api-access-wzsvh" (OuterVolumeSpecName: "kube-api-access-wzsvh") pod "dc2522aa-c494-4b9a-a756-cb763cd85d19" (UID: "dc2522aa-c494-4b9a-a756-cb763cd85d19"). InnerVolumeSpecName "kube-api-access-wzsvh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.670937 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc2522aa-c494-4b9a-a756-cb763cd85d19-config-data" (OuterVolumeSpecName: "config-data") pod "dc2522aa-c494-4b9a-a756-cb763cd85d19" (UID: "dc2522aa-c494-4b9a-a756-cb763cd85d19"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.676521 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc2522aa-c494-4b9a-a756-cb763cd85d19-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc2522aa-c494-4b9a-a756-cb763cd85d19" (UID: "dc2522aa-c494-4b9a-a756-cb763cd85d19"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.740089 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c74c45e-c6e3-4cf8-8202-da93ad73131c-combined-ca-bundle\") pod \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\" (UID: \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\") "
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.740181 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c74c45e-c6e3-4cf8-8202-da93ad73131c-config-data\") pod \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\" (UID: \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\") "
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.740373 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt9ww\" (UniqueName: \"kubernetes.io/projected/4c74c45e-c6e3-4cf8-8202-da93ad73131c-kube-api-access-vt9ww\") pod \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\" (UID: \"4c74c45e-c6e3-4cf8-8202-da93ad73131c\") "
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.740966 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc2522aa-c494-4b9a-a756-cb763cd85d19-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.740991 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc2522aa-c494-4b9a-a756-cb763cd85d19-logs\") on node \"crc\" DevicePath \"\""
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.741005 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc2522aa-c494-4b9a-a756-cb763cd85d19-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.741017 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzsvh\" (UniqueName: \"kubernetes.io/projected/dc2522aa-c494-4b9a-a756-cb763cd85d19-kube-api-access-wzsvh\") on node \"crc\" DevicePath \"\""
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.746341 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c74c45e-c6e3-4cf8-8202-da93ad73131c-kube-api-access-vt9ww" (OuterVolumeSpecName: "kube-api-access-vt9ww") pod "4c74c45e-c6e3-4cf8-8202-da93ad73131c" (UID: "4c74c45e-c6e3-4cf8-8202-da93ad73131c"). InnerVolumeSpecName "kube-api-access-vt9ww". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.768623 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c74c45e-c6e3-4cf8-8202-da93ad73131c-config-data" (OuterVolumeSpecName: "config-data") pod "4c74c45e-c6e3-4cf8-8202-da93ad73131c" (UID: "4c74c45e-c6e3-4cf8-8202-da93ad73131c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.773778 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c74c45e-c6e3-4cf8-8202-da93ad73131c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c74c45e-c6e3-4cf8-8202-da93ad73131c" (UID: "4c74c45e-c6e3-4cf8-8202-da93ad73131c"). InnerVolumeSpecName "combined-ca-bundle".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.842440 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt9ww\" (UniqueName: \"kubernetes.io/projected/4c74c45e-c6e3-4cf8-8202-da93ad73131c-kube-api-access-vt9ww\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.842773 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c74c45e-c6e3-4cf8-8202-da93ad73131c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:23 crc kubenswrapper[4822]: I1201 07:13:23.842787 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c74c45e-c6e3-4cf8-8202-da93ad73131c-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.113881 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.127647 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.138952 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.154665 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.166592 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:13:24 crc kubenswrapper[4822]: E1201 07:13:24.168915 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c74c45e-c6e3-4cf8-8202-da93ad73131c" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.168947 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c74c45e-c6e3-4cf8-8202-da93ad73131c" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 07:13:24 crc kubenswrapper[4822]: E1201 07:13:24.168999 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc2522aa-c494-4b9a-a756-cb763cd85d19" containerName="nova-metadata-metadata" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.169008 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc2522aa-c494-4b9a-a756-cb763cd85d19" containerName="nova-metadata-metadata" Dec 01 07:13:24 crc kubenswrapper[4822]: E1201 07:13:24.169020 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc2522aa-c494-4b9a-a756-cb763cd85d19" containerName="nova-metadata-log" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.169027 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc2522aa-c494-4b9a-a756-cb763cd85d19" containerName="nova-metadata-log" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.169224 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c74c45e-c6e3-4cf8-8202-da93ad73131c" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.169261 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc2522aa-c494-4b9a-a756-cb763cd85d19" containerName="nova-metadata-log" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.169285 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc2522aa-c494-4b9a-a756-cb763cd85d19" containerName="nova-metadata-metadata" Dec 01 07:13:24 crc 
kubenswrapper[4822]: I1201 07:13:24.170057 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.174172 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.174300 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.174182 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.192065 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.203605 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.205408 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.208962 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.209338 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.216577 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.251240 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fxbl\" (UniqueName: \"kubernetes.io/projected/067b1287-cccb-4ca9-b817-f49673c0e17a-kube-api-access-7fxbl\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.251280 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.251303 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.251334 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.251372 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/067b1287-cccb-4ca9-b817-f49673c0e17a-logs\") pod \"nova-metadata-0\" 
(UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.251390 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.251409 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.251511 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.251594 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-config-data\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.251616 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf4zb\" (UniqueName: \"kubernetes.io/projected/03cbadb0-cfc2-4ade-9e82-efd876532994-kube-api-access-bf4zb\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.348368 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.352621 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-sg-core-conf-yaml\") pod \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.352729 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbphf\" (UniqueName: \"kubernetes.io/projected/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-kube-api-access-pbphf\") pod \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.352760 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-log-httpd\") pod \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.352966 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-config-data\") pod \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.352989 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-combined-ca-bundle\") pod \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.353019 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-scripts\") pod \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.353125 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-run-httpd\") pod \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\" (UID: \"c8bce372-5a61-4aae-84a8-eac76a3e3cc8\") " Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.353409 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.353485 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/067b1287-cccb-4ca9-b817-f49673c0e17a-logs\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.353519 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.353624 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.353785 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.353837 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-config-data\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.353860 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf4zb\" (UniqueName: \"kubernetes.io/projected/03cbadb0-cfc2-4ade-9e82-efd876532994-kube-api-access-bf4zb\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.353939 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fxbl\" (UniqueName: \"kubernetes.io/projected/067b1287-cccb-4ca9-b817-f49673c0e17a-kube-api-access-7fxbl\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.353988 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.354007 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.357543 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.357703 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-kube-api-access-pbphf" (OuterVolumeSpecName: "kube-api-access-pbphf") pod "c8bce372-5a61-4aae-84a8-eac76a3e3cc8" (UID: "c8bce372-5a61-4aae-84a8-eac76a3e3cc8"). InnerVolumeSpecName "kube-api-access-pbphf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.360733 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.361495 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.362728 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.364588 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.364794 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c8bce372-5a61-4aae-84a8-eac76a3e3cc8" (UID: "c8bce372-5a61-4aae-84a8-eac76a3e3cc8"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.365576 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/067b1287-cccb-4ca9-b817-f49673c0e17a-logs\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.366253 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c8bce372-5a61-4aae-84a8-eac76a3e3cc8" (UID: "c8bce372-5a61-4aae-84a8-eac76a3e3cc8"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.367540 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.370699 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-config-data\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.377841 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-scripts" (OuterVolumeSpecName: "scripts") pod "c8bce372-5a61-4aae-84a8-eac76a3e3cc8" (UID: "c8bce372-5a61-4aae-84a8-eac76a3e3cc8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.396179 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf4zb\" (UniqueName: \"kubernetes.io/projected/03cbadb0-cfc2-4ade-9e82-efd876532994-kube-api-access-bf4zb\") pod \"nova-cell1-novncproxy-0\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.399066 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fxbl\" (UniqueName: \"kubernetes.io/projected/067b1287-cccb-4ca9-b817-f49673c0e17a-kube-api-access-7fxbl\") pod \"nova-metadata-0\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.402279 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c8bce372-5a61-4aae-84a8-eac76a3e3cc8" (UID: "c8bce372-5a61-4aae-84a8-eac76a3e3cc8"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.455826 4822 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.455865 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbphf\" (UniqueName: \"kubernetes.io/projected/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-kube-api-access-pbphf\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.455879 4822 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.455890 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.455901 4822 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.458533 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8bce372-5a61-4aae-84a8-eac76a3e3cc8" (UID: "c8bce372-5a61-4aae-84a8-eac76a3e3cc8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.473350 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-config-data" (OuterVolumeSpecName: "config-data") pod "c8bce372-5a61-4aae-84a8-eac76a3e3cc8" (UID: "c8bce372-5a61-4aae-84a8-eac76a3e3cc8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.490001 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.532264 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.557731 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.557770 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8bce372-5a61-4aae-84a8-eac76a3e3cc8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.577373 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8e079db6-a9e0-464b-b99d-57887190a5b8","Type":"ContainerStarted","Data":"52b80d1f9eb289b2321d023ee74702200d73e7e0bcf824e5f0e71860cdbd014f"} Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.577524 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.580630 4822 generic.go:334] "Generic (PLEG): container finished" podID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerID="5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db" exitCode=0 Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.580668 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8bce372-5a61-4aae-84a8-eac76a3e3cc8","Type":"ContainerDied","Data":"5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db"} Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.580686 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8bce372-5a61-4aae-84a8-eac76a3e3cc8","Type":"ContainerDied","Data":"edd2e52b36aa0e0d0f177b5e2e88609567e2351967e657b9baa1b87e137ef445"} Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.580702 4822 scope.go:117] "RemoveContainer" containerID="b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.580802 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.630961 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.235660067 podStartE2EDuration="2.630935873s" podCreationTimestamp="2025-12-01 07:13:22 +0000 UTC" firstStartedPulling="2025-12-01 07:13:23.456084683 +0000 UTC m=+1358.776892379" lastFinishedPulling="2025-12-01 07:13:23.851360499 +0000 UTC m=+1359.172168185" observedRunningTime="2025-12-01 07:13:24.59530585 +0000 UTC m=+1359.916113536" watchObservedRunningTime="2025-12-01 07:13:24.630935873 +0000 UTC m=+1359.951743559" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.663872 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.676506 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.678998 4822 scope.go:117] "RemoveContainer" containerID="9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.686914 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:13:24 crc kubenswrapper[4822]: E1201 07:13:24.687307 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="ceilometer-notification-agent" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.687323 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="ceilometer-notification-agent" Dec 01 07:13:24 crc kubenswrapper[4822]: E1201 07:13:24.687345 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="ceilometer-central-agent" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.687352 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="ceilometer-central-agent" Dec 01 07:13:24 crc kubenswrapper[4822]: E1201 07:13:24.687365 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="sg-core" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.687371 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="sg-core" Dec 01 07:13:24 crc kubenswrapper[4822]: E1201 07:13:24.687380 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="proxy-httpd" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.687386 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="proxy-httpd" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.687779 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="ceilometer-notification-agent" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.687804 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="sg-core" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.687815 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="proxy-httpd" Dec 01 07:13:24 crc kubenswrapper[4822]: 
I1201 07:13:24.687825 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" containerName="ceilometer-central-agent" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.689662 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.693013 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.695781 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.696083 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.700670 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.720974 4822 scope.go:117] "RemoveContainer" containerID="5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.742160 4822 scope.go:117] "RemoveContainer" containerID="379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.767303 4822 scope.go:117] "RemoveContainer" containerID="b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6" Dec 01 07:13:24 crc kubenswrapper[4822]: E1201 07:13:24.777628 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6\": container with ID starting with b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6 not found: ID does not exist" containerID="b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.777693 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6"} err="failed to get container status \"b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6\": rpc error: code = NotFound desc = could not find container \"b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6\": container with ID starting with b484b62f2b7af3979901a59f2e598f4017d414feea531ac40b6b983205bf59c6 not found: ID does not exist" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.777729 4822 scope.go:117] "RemoveContainer" containerID="9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555" Dec 01 07:13:24 crc kubenswrapper[4822]: E1201 07:13:24.778016 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555\": container with ID starting with 9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555 not found: ID does not exist" containerID="9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.778044 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555"} err="failed to get container status 
\"9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555\": rpc error: code = NotFound desc = could not find container \"9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555\": container with ID starting with 9d1500dbda603538c53e8dae24fcfea7210916198d8288f96cc236fc0b87a555 not found: ID does not exist" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.778065 4822 scope.go:117] "RemoveContainer" containerID="5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db" Dec 01 07:13:24 crc kubenswrapper[4822]: E1201 07:13:24.778405 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db\": container with ID starting with 5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db not found: ID does not exist" containerID="5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.778453 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db"} err="failed to get container status \"5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db\": rpc error: code = NotFound desc = could not find container \"5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db\": container with ID starting with 5e85863c1f4dce6db5f7d3a9ce3ac0b1e3593749a1e95ab84d4f22276fa490db not found: ID does not exist" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.778486 4822 scope.go:117] "RemoveContainer" containerID="379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381" Dec 01 07:13:24 crc kubenswrapper[4822]: E1201 07:13:24.779862 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381\": container with ID starting with 379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381 not found: ID does not exist" containerID="379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.779892 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381"} err="failed to get container status \"379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381\": rpc error: code = NotFound desc = could not find container \"379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381\": container with ID starting with 379cafbe5931550211b8d80e89e526a178f91df864524bbbaac6485c86d3c381 not found: ID does not exist" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.864695 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.864740 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " 
pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.864769 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-scripts\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.864799 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.864820 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be89f46f-916f-49c6-a29c-adac02673b60-log-httpd\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.864886 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be89f46f-916f-49c6-a29c-adac02673b60-run-httpd\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.865922 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-config-data\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.866114 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4db6\" (UniqueName: \"kubernetes.io/projected/be89f46f-916f-49c6-a29c-adac02673b60-kube-api-access-x4db6\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.968530 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.968635 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be89f46f-916f-49c6-a29c-adac02673b60-log-httpd\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.968669 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be89f46f-916f-49c6-a29c-adac02673b60-run-httpd\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.968794 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-config-data\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.968896 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4db6\" (UniqueName: \"kubernetes.io/projected/be89f46f-916f-49c6-a29c-adac02673b60-kube-api-access-x4db6\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.968994 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.969026 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.969066 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-scripts\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.970098 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be89f46f-916f-49c6-a29c-adac02673b60-run-httpd\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.970393 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be89f46f-916f-49c6-a29c-adac02673b60-log-httpd\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.978041 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.978596 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-scripts\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.980099 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.980470 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c74c45e-c6e3-4cf8-8202-da93ad73131c" 
path="/var/lib/kubelet/pods/4c74c45e-c6e3-4cf8-8202-da93ad73131c/volumes" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.981128 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-config-data\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.981446 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8bce372-5a61-4aae-84a8-eac76a3e3cc8" path="/var/lib/kubelet/pods/c8bce372-5a61-4aae-84a8-eac76a3e3cc8/volumes" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.982282 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.982810 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc2522aa-c494-4b9a-a756-cb763cd85d19" path="/var/lib/kubelet/pods/dc2522aa-c494-4b9a-a756-cb763cd85d19/volumes" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.989344 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4db6\" (UniqueName: \"kubernetes.io/projected/be89f46f-916f-49c6-a29c-adac02673b60-kube-api-access-x4db6\") pod \"ceilometer-0\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " pod="openstack/ceilometer-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.990753 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.992006 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 07:13:24 crc kubenswrapper[4822]: I1201 07:13:24.992362 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.002128 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.009160 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:13:25 crc kubenswrapper[4822]: W1201 07:13:25.017936 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03cbadb0_cfc2_4ade_9e82_efd876532994.slice/crio-0c58259b1ab28e6448f9db53319a670cfb6a1020523336eee853402052d9bb89 WatchSource:0}: Error finding container 0c58259b1ab28e6448f9db53319a670cfb6a1020523336eee853402052d9bb89: Status 404 returned error can't find the container with id 0c58259b1ab28e6448f9db53319a670cfb6a1020523336eee853402052d9bb89 Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.019787 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.073437 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:13:25 crc kubenswrapper[4822]: W1201 07:13:25.075469 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod067b1287_cccb_4ca9_b817_f49673c0e17a.slice/crio-114027931bb197dae4b83569799298355e9f3c32f35b73ad0a7ff8b2125dad52 WatchSource:0}: Error finding container 114027931bb197dae4b83569799298355e9f3c32f35b73ad0a7ff8b2125dad52: Status 404 returned error can't find the container with id 114027931bb197dae4b83569799298355e9f3c32f35b73ad0a7ff8b2125dad52 Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.515124 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:13:25 crc kubenswrapper[4822]: W1201 07:13:25.520151 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe89f46f_916f_49c6_a29c_adac02673b60.slice/crio-38002bf7fffa35889ed282ea5b1dc514ad52eda41464a477cdb3a12ae7b711dc WatchSource:0}: Error finding container 38002bf7fffa35889ed282ea5b1dc514ad52eda41464a477cdb3a12ae7b711dc: Status 404 returned error can't find the container with id 38002bf7fffa35889ed282ea5b1dc514ad52eda41464a477cdb3a12ae7b711dc Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.610424 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"03cbadb0-cfc2-4ade-9e82-efd876532994","Type":"ContainerStarted","Data":"e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f"} Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.610472 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"03cbadb0-cfc2-4ade-9e82-efd876532994","Type":"ContainerStarted","Data":"0c58259b1ab28e6448f9db53319a670cfb6a1020523336eee853402052d9bb89"} Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.615855 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"067b1287-cccb-4ca9-b817-f49673c0e17a","Type":"ContainerStarted","Data":"da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453"} Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.615887 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"067b1287-cccb-4ca9-b817-f49673c0e17a","Type":"ContainerStarted","Data":"25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630"} Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.615901 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"067b1287-cccb-4ca9-b817-f49673c0e17a","Type":"ContainerStarted","Data":"114027931bb197dae4b83569799298355e9f3c32f35b73ad0a7ff8b2125dad52"} Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.617465 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be89f46f-916f-49c6-a29c-adac02673b60","Type":"ContainerStarted","Data":"38002bf7fffa35889ed282ea5b1dc514ad52eda41464a477cdb3a12ae7b711dc"} Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.618321 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.624776 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/nova-api-0" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.641082 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.641064621 podStartE2EDuration="1.641064621s" podCreationTimestamp="2025-12-01 07:13:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:13:25.62576314 +0000 UTC m=+1360.946570866" watchObservedRunningTime="2025-12-01 07:13:25.641064621 +0000 UTC m=+1360.961872297" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.658210 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.658185182 podStartE2EDuration="1.658185182s" podCreationTimestamp="2025-12-01 07:13:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:13:25.646898775 +0000 UTC m=+1360.967706471" watchObservedRunningTime="2025-12-01 07:13:25.658185182 +0000 UTC m=+1360.978992898" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.835438 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d8fc4ccc9-zlscm"] Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.837905 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.891625 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d8fc4ccc9-zlscm"] Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.893795 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-dns-svc\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.893929 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-ovsdbserver-nb\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.894044 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-ovsdbserver-sb\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.894145 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg8g8\" (UniqueName: \"kubernetes.io/projected/8f969273-1c0e-4ac7-aae1-e04cea6c864f-kube-api-access-rg8g8\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.894244 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-dns-swift-storage-0\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.894314 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-config\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.995781 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-dns-swift-storage-0\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.995814 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-config\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.995899 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-dns-svc\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.995973 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-ovsdbserver-nb\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.996056 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-ovsdbserver-sb\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:25 crc kubenswrapper[4822]: I1201 07:13:25.996101 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg8g8\" (UniqueName: \"kubernetes.io/projected/8f969273-1c0e-4ac7-aae1-e04cea6c864f-kube-api-access-rg8g8\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:26 crc kubenswrapper[4822]: I1201 07:13:26.000688 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-ovsdbserver-nb\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:26 crc kubenswrapper[4822]: I1201 07:13:26.001260 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-ovsdbserver-sb\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:26 crc kubenswrapper[4822]: I1201 07:13:26.001408 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-dns-svc\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:26 crc kubenswrapper[4822]: I1201 07:13:26.002194 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-dns-swift-storage-0\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:26 crc kubenswrapper[4822]: I1201 07:13:26.002432 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-config\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:26 crc kubenswrapper[4822]: I1201 07:13:26.016844 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg8g8\" (UniqueName: \"kubernetes.io/projected/8f969273-1c0e-4ac7-aae1-e04cea6c864f-kube-api-access-rg8g8\") pod \"dnsmasq-dns-5d8fc4ccc9-zlscm\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:26 crc kubenswrapper[4822]: I1201 07:13:26.244890 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:26 crc kubenswrapper[4822]: I1201 07:13:26.645482 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be89f46f-916f-49c6-a29c-adac02673b60","Type":"ContainerStarted","Data":"aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9"} Dec 01 07:13:26 crc kubenswrapper[4822]: I1201 07:13:26.748243 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d8fc4ccc9-zlscm"] Dec 01 07:13:26 crc kubenswrapper[4822]: W1201 07:13:26.748743 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f969273_1c0e_4ac7_aae1_e04cea6c864f.slice/crio-33f99ab0ecce4d1a9d3c67881a8de41dcec841780ac802e261f6e967d6e2800a WatchSource:0}: Error finding container 33f99ab0ecce4d1a9d3c67881a8de41dcec841780ac802e261f6e967d6e2800a: Status 404 returned error can't find the container with id 33f99ab0ecce4d1a9d3c67881a8de41dcec841780ac802e261f6e967d6e2800a Dec 01 07:13:27 crc kubenswrapper[4822]: I1201 07:13:27.659668 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be89f46f-916f-49c6-a29c-adac02673b60","Type":"ContainerStarted","Data":"16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402"} Dec 01 07:13:27 crc kubenswrapper[4822]: I1201 07:13:27.663077 4822 generic.go:334] "Generic (PLEG): container finished" podID="8f969273-1c0e-4ac7-aae1-e04cea6c864f" containerID="3fa7a9f9bdcb1379ddf677ccdeca716a03c96ecdbe0e60bfa23301dbe79a2174" exitCode=0 Dec 01 07:13:27 crc kubenswrapper[4822]: I1201 07:13:27.663269 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" event={"ID":"8f969273-1c0e-4ac7-aae1-e04cea6c864f","Type":"ContainerDied","Data":"3fa7a9f9bdcb1379ddf677ccdeca716a03c96ecdbe0e60bfa23301dbe79a2174"} Dec 01 07:13:27 crc kubenswrapper[4822]: I1201 07:13:27.663331 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" event={"ID":"8f969273-1c0e-4ac7-aae1-e04cea6c864f","Type":"ContainerStarted","Data":"33f99ab0ecce4d1a9d3c67881a8de41dcec841780ac802e261f6e967d6e2800a"} Dec 01 07:13:28 crc kubenswrapper[4822]: I1201 07:13:28.580613 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:13:28 crc kubenswrapper[4822]: I1201 07:13:28.673733 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" event={"ID":"8f969273-1c0e-4ac7-aae1-e04cea6c864f","Type":"ContainerStarted","Data":"29cb191f67cb3d995ae68c5f80b0794867e6eb356848867309a88a2a42dd2ba1"} Dec 01 07:13:28 crc kubenswrapper[4822]: I1201 07:13:28.674221 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:28 crc kubenswrapper[4822]: I1201 07:13:28.676706 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be89f46f-916f-49c6-a29c-adac02673b60","Type":"ContainerStarted","Data":"eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd"} Dec 01 07:13:28 crc kubenswrapper[4822]: I1201 07:13:28.676853 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" containerName="nova-api-log" containerID="cri-o://983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9" gracePeriod=30 Dec 01 07:13:28 crc 
kubenswrapper[4822]: I1201 07:13:28.676884 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" containerName="nova-api-api" containerID="cri-o://7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da" gracePeriod=30 Dec 01 07:13:28 crc kubenswrapper[4822]: I1201 07:13:28.705117 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" podStartSLOduration=3.705097968 podStartE2EDuration="3.705097968s" podCreationTimestamp="2025-12-01 07:13:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:13:28.6950657 +0000 UTC m=+1364.015873386" watchObservedRunningTime="2025-12-01 07:13:28.705097968 +0000 UTC m=+1364.025905654" Dec 01 07:13:28 crc kubenswrapper[4822]: I1201 07:13:28.822013 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:13:29 crc kubenswrapper[4822]: I1201 07:13:29.490981 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:29 crc kubenswrapper[4822]: I1201 07:13:29.532769 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 07:13:29 crc kubenswrapper[4822]: I1201 07:13:29.533085 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 07:13:29 crc kubenswrapper[4822]: I1201 07:13:29.694741 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be89f46f-916f-49c6-a29c-adac02673b60","Type":"ContainerStarted","Data":"7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56"} Dec 01 07:13:29 crc kubenswrapper[4822]: I1201 07:13:29.694989 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="ceilometer-central-agent" containerID="cri-o://aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9" gracePeriod=30 Dec 01 07:13:29 crc kubenswrapper[4822]: I1201 07:13:29.695339 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 07:13:29 crc kubenswrapper[4822]: I1201 07:13:29.695742 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="proxy-httpd" containerID="cri-o://7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56" gracePeriod=30 Dec 01 07:13:29 crc kubenswrapper[4822]: I1201 07:13:29.695790 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="sg-core" containerID="cri-o://eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd" gracePeriod=30 Dec 01 07:13:29 crc kubenswrapper[4822]: I1201 07:13:29.695828 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="ceilometer-notification-agent" containerID="cri-o://16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402" gracePeriod=30 Dec 01 07:13:29 crc kubenswrapper[4822]: I1201 07:13:29.702672 4822 generic.go:334] "Generic (PLEG): container finished" 
podID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" containerID="983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9" exitCode=143 Dec 01 07:13:29 crc kubenswrapper[4822]: I1201 07:13:29.702738 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3c5d992e-2bd7-411e-ba2d-d623d09baf3c","Type":"ContainerDied","Data":"983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9"} Dec 01 07:13:29 crc kubenswrapper[4822]: I1201 07:13:29.722649 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.859345325 podStartE2EDuration="5.722631697s" podCreationTimestamp="2025-12-01 07:13:24 +0000 UTC" firstStartedPulling="2025-12-01 07:13:25.522319731 +0000 UTC m=+1360.843127417" lastFinishedPulling="2025-12-01 07:13:29.385606093 +0000 UTC m=+1364.706413789" observedRunningTime="2025-12-01 07:13:29.717382601 +0000 UTC m=+1365.038190287" watchObservedRunningTime="2025-12-01 07:13:29.722631697 +0000 UTC m=+1365.043439383" Dec 01 07:13:30 crc kubenswrapper[4822]: I1201 07:13:30.716264 4822 generic.go:334] "Generic (PLEG): container finished" podID="be89f46f-916f-49c6-a29c-adac02673b60" containerID="eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd" exitCode=2 Dec 01 07:13:30 crc kubenswrapper[4822]: I1201 07:13:30.716658 4822 generic.go:334] "Generic (PLEG): container finished" podID="be89f46f-916f-49c6-a29c-adac02673b60" containerID="16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402" exitCode=0 Dec 01 07:13:30 crc kubenswrapper[4822]: I1201 07:13:30.716347 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be89f46f-916f-49c6-a29c-adac02673b60","Type":"ContainerDied","Data":"eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd"} Dec 01 07:13:30 crc kubenswrapper[4822]: I1201 07:13:30.716723 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be89f46f-916f-49c6-a29c-adac02673b60","Type":"ContainerDied","Data":"16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402"} Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.340453 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.439621 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-logs\") pod \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.439744 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6djz\" (UniqueName: \"kubernetes.io/projected/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-kube-api-access-d6djz\") pod \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.439823 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-config-data\") pod \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.440075 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-combined-ca-bundle\") pod \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\" (UID: \"3c5d992e-2bd7-411e-ba2d-d623d09baf3c\") " Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.440590 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-logs" (OuterVolumeSpecName: "logs") pod "3c5d992e-2bd7-411e-ba2d-d623d09baf3c" (UID: "3c5d992e-2bd7-411e-ba2d-d623d09baf3c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.447797 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-kube-api-access-d6djz" (OuterVolumeSpecName: "kube-api-access-d6djz") pod "3c5d992e-2bd7-411e-ba2d-d623d09baf3c" (UID: "3c5d992e-2bd7-411e-ba2d-d623d09baf3c"). InnerVolumeSpecName "kube-api-access-d6djz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.473890 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-config-data" (OuterVolumeSpecName: "config-data") pod "3c5d992e-2bd7-411e-ba2d-d623d09baf3c" (UID: "3c5d992e-2bd7-411e-ba2d-d623d09baf3c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.489824 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c5d992e-2bd7-411e-ba2d-d623d09baf3c" (UID: "3c5d992e-2bd7-411e-ba2d-d623d09baf3c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.542701 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.542750 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.542760 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6djz\" (UniqueName: \"kubernetes.io/projected/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-kube-api-access-d6djz\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.542771 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5d992e-2bd7-411e-ba2d-d623d09baf3c-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.740020 4822 generic.go:334] "Generic (PLEG): container finished" podID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" containerID="7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da" exitCode=0 Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.740067 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3c5d992e-2bd7-411e-ba2d-d623d09baf3c","Type":"ContainerDied","Data":"7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da"} Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.740120 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3c5d992e-2bd7-411e-ba2d-d623d09baf3c","Type":"ContainerDied","Data":"a1c2497c58a2e997cce2e277c71a776265aaabe06bf665808585efc57eb087ba"} Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.740141 4822 scope.go:117] "RemoveContainer" containerID="7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.740348 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.766798 4822 scope.go:117] "RemoveContainer" containerID="983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.797123 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.797717 4822 scope.go:117] "RemoveContainer" containerID="7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da" Dec 01 07:13:32 crc kubenswrapper[4822]: E1201 07:13:32.798136 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da\": container with ID starting with 7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da not found: ID does not exist" containerID="7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.798168 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da"} err="failed to get container status \"7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da\": rpc error: code = NotFound desc = could not find container \"7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da\": container with ID starting with 7200ada061a763689bbee04291b9f5ec18f3245862524f9f24e684b471cc81da not found: ID does not exist" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.798189 4822 scope.go:117] "RemoveContainer" containerID="983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9" Dec 01 07:13:32 crc kubenswrapper[4822]: E1201 07:13:32.798855 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9\": container with ID starting with 983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9 not found: ID does not exist" containerID="983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.798879 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9"} err="failed to get container status \"983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9\": rpc error: code = NotFound desc = could not find container \"983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9\": container with ID starting with 983f996bb1c15e2ea7ef1f5d448857f62c7a7e4df1685f91acb9615216d08ce9 not found: ID does not exist" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.808790 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.825184 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 07:13:32 crc kubenswrapper[4822]: E1201 07:13:32.825596 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" containerName="nova-api-api" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.825608 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" containerName="nova-api-api" Dec 01 07:13:32 crc 
kubenswrapper[4822]: E1201 07:13:32.825636 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" containerName="nova-api-log" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.825642 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" containerName="nova-api-log" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.825819 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" containerName="nova-api-log" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.825844 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" containerName="nova-api-api" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.826937 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.828925 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.829170 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.829322 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.841503 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.847651 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.847733 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-config-data\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.847763 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-logs\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.847811 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-public-tls-certs\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.847890 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9vch\" (UniqueName: \"kubernetes.io/projected/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-kube-api-access-g9vch\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.847917 4822 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.950085 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.955430 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.955799 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-config-data\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.955834 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-logs\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.955883 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-public-tls-certs\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.955962 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9vch\" (UniqueName: \"kubernetes.io/projected/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-kube-api-access-g9vch\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.955988 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.957209 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-logs\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.959022 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.959444 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-public-tls-certs\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.960000 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-config-data\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.979344 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c5d992e-2bd7-411e-ba2d-d623d09baf3c" path="/var/lib/kubelet/pods/3c5d992e-2bd7-411e-ba2d-d623d09baf3c/volumes" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.980083 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 01 07:13:32 crc kubenswrapper[4822]: I1201 07:13:32.980707 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9vch\" (UniqueName: \"kubernetes.io/projected/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-kube-api-access-g9vch\") pod \"nova-api-0\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " pod="openstack/nova-api-0" Dec 01 07:13:33 crc kubenswrapper[4822]: I1201 07:13:33.157686 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:13:33 crc kubenswrapper[4822]: I1201 07:13:33.665961 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:13:33 crc kubenswrapper[4822]: W1201 07:13:33.668347 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3a3625d_4e93_4b9b_bedf_a97a58ecfecc.slice/crio-ca5664612596e8d1402be585c7acfecdf10de9d1655e865a70d0f50ee640bbfc WatchSource:0}: Error finding container ca5664612596e8d1402be585c7acfecdf10de9d1655e865a70d0f50ee640bbfc: Status 404 returned error can't find the container with id ca5664612596e8d1402be585c7acfecdf10de9d1655e865a70d0f50ee640bbfc Dec 01 07:13:33 crc kubenswrapper[4822]: I1201 07:13:33.773285 4822 generic.go:334] "Generic (PLEG): container finished" podID="be89f46f-916f-49c6-a29c-adac02673b60" containerID="aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9" exitCode=0 Dec 01 07:13:33 crc kubenswrapper[4822]: I1201 07:13:33.773349 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be89f46f-916f-49c6-a29c-adac02673b60","Type":"ContainerDied","Data":"aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9"} Dec 01 07:13:33 crc kubenswrapper[4822]: I1201 07:13:33.775882 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc","Type":"ContainerStarted","Data":"ca5664612596e8d1402be585c7acfecdf10de9d1655e865a70d0f50ee640bbfc"} Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.490938 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.511813 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.532842 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-metadata-0" Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.532885 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.790185 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc","Type":"ContainerStarted","Data":"460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544"} Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.790712 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc","Type":"ContainerStarted","Data":"78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc"} Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.818643 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.818615179 podStartE2EDuration="2.818615179s" podCreationTimestamp="2025-12-01 07:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:13:34.816339386 +0000 UTC m=+1370.137147072" watchObservedRunningTime="2025-12-01 07:13:34.818615179 +0000 UTC m=+1370.139422895" Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.823019 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.963348 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-jv7nr"] Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.964476 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.964971 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-jv7nr"] Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.967050 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.967249 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.997960 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-scripts\") pod \"nova-cell1-cell-mapping-jv7nr\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.998003 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-config-data\") pod \"nova-cell1-cell-mapping-jv7nr\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.998034 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx4m5\" (UniqueName: \"kubernetes.io/projected/be029423-e53b-476b-88ef-99ad1623a8c4-kube-api-access-cx4m5\") pod \"nova-cell1-cell-mapping-jv7nr\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:34 crc kubenswrapper[4822]: I1201 07:13:34.998183 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-jv7nr\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:35 crc kubenswrapper[4822]: I1201 07:13:35.100091 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-scripts\") pod \"nova-cell1-cell-mapping-jv7nr\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:35 crc kubenswrapper[4822]: I1201 07:13:35.100132 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-config-data\") pod \"nova-cell1-cell-mapping-jv7nr\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:35 crc kubenswrapper[4822]: I1201 07:13:35.100164 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx4m5\" (UniqueName: \"kubernetes.io/projected/be029423-e53b-476b-88ef-99ad1623a8c4-kube-api-access-cx4m5\") pod \"nova-cell1-cell-mapping-jv7nr\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:35 crc kubenswrapper[4822]: I1201 07:13:35.100238 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-jv7nr\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:35 crc kubenswrapper[4822]: I1201 07:13:35.105616 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-jv7nr\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:35 crc kubenswrapper[4822]: I1201 07:13:35.106309 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-config-data\") pod \"nova-cell1-cell-mapping-jv7nr\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:35 crc kubenswrapper[4822]: I1201 07:13:35.114558 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-scripts\") pod \"nova-cell1-cell-mapping-jv7nr\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:35 crc kubenswrapper[4822]: I1201 07:13:35.124166 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx4m5\" (UniqueName: \"kubernetes.io/projected/be029423-e53b-476b-88ef-99ad1623a8c4-kube-api-access-cx4m5\") pod \"nova-cell1-cell-mapping-jv7nr\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:35 crc kubenswrapper[4822]: I1201 07:13:35.290007 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:35 crc kubenswrapper[4822]: I1201 07:13:35.547800 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="067b1287-cccb-4ca9-b817-f49673c0e17a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 07:13:35 crc kubenswrapper[4822]: I1201 07:13:35.547807 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="067b1287-cccb-4ca9-b817-f49673c0e17a" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 07:13:35 crc kubenswrapper[4822]: I1201 07:13:35.811658 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-jv7nr"] Dec 01 07:13:35 crc kubenswrapper[4822]: W1201 07:13:35.819751 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe029423_e53b_476b_88ef_99ad1623a8c4.slice/crio-ed40e73d2df9e69ef4c1c863dbdc4a5f8b3d7ddfbf96b775e326af4653d915c4 WatchSource:0}: Error finding container ed40e73d2df9e69ef4c1c863dbdc4a5f8b3d7ddfbf96b775e326af4653d915c4: Status 404 returned error can't find the container with id ed40e73d2df9e69ef4c1c863dbdc4a5f8b3d7ddfbf96b775e326af4653d915c4 Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.246335 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.337902 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5594d9b959-d9p82"] Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.338417 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" podUID="bea1d532-5241-4968-84c8-badda78b7e79" containerName="dnsmasq-dns" containerID="cri-o://18522af8c645a747d69c9facd364e98dd7884610f65cb4041ab9af854278bd63" gracePeriod=10 Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.814502 4822 generic.go:334] "Generic (PLEG): container finished" podID="bea1d532-5241-4968-84c8-badda78b7e79" containerID="18522af8c645a747d69c9facd364e98dd7884610f65cb4041ab9af854278bd63" exitCode=0 Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.814590 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" event={"ID":"bea1d532-5241-4968-84c8-badda78b7e79","Type":"ContainerDied","Data":"18522af8c645a747d69c9facd364e98dd7884610f65cb4041ab9af854278bd63"} Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.814637 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" event={"ID":"bea1d532-5241-4968-84c8-badda78b7e79","Type":"ContainerDied","Data":"22e69159f758a608c1626ad031b5f8b89a1ce4ecaa6e23656e79401d9bed4759"} Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.814648 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22e69159f758a608c1626ad031b5f8b89a1ce4ecaa6e23656e79401d9bed4759" Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.816347 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-jv7nr" 
event={"ID":"be029423-e53b-476b-88ef-99ad1623a8c4","Type":"ContainerStarted","Data":"b8722356572623cc5fc34be9c971b4dcd146825fe883537693297932f5b2745f"} Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.816389 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-jv7nr" event={"ID":"be029423-e53b-476b-88ef-99ad1623a8c4","Type":"ContainerStarted","Data":"ed40e73d2df9e69ef4c1c863dbdc4a5f8b3d7ddfbf96b775e326af4653d915c4"} Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.840705 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-jv7nr" podStartSLOduration=2.840685626 podStartE2EDuration="2.840685626s" podCreationTimestamp="2025-12-01 07:13:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:13:36.831731608 +0000 UTC m=+1372.152539294" watchObservedRunningTime="2025-12-01 07:13:36.840685626 +0000 UTC m=+1372.161493322" Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.878836 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.941599 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-config\") pod \"bea1d532-5241-4968-84c8-badda78b7e79\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.941690 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-dns-swift-storage-0\") pod \"bea1d532-5241-4968-84c8-badda78b7e79\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.941724 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-dns-svc\") pod \"bea1d532-5241-4968-84c8-badda78b7e79\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.941741 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbdk9\" (UniqueName: \"kubernetes.io/projected/bea1d532-5241-4968-84c8-badda78b7e79-kube-api-access-zbdk9\") pod \"bea1d532-5241-4968-84c8-badda78b7e79\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.941791 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-ovsdbserver-nb\") pod \"bea1d532-5241-4968-84c8-badda78b7e79\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.941826 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-ovsdbserver-sb\") pod \"bea1d532-5241-4968-84c8-badda78b7e79\" (UID: \"bea1d532-5241-4968-84c8-badda78b7e79\") " Dec 01 07:13:36 crc kubenswrapper[4822]: I1201 07:13:36.961612 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/bea1d532-5241-4968-84c8-badda78b7e79-kube-api-access-zbdk9" (OuterVolumeSpecName: "kube-api-access-zbdk9") pod "bea1d532-5241-4968-84c8-badda78b7e79" (UID: "bea1d532-5241-4968-84c8-badda78b7e79"). InnerVolumeSpecName "kube-api-access-zbdk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.002289 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bea1d532-5241-4968-84c8-badda78b7e79" (UID: "bea1d532-5241-4968-84c8-badda78b7e79"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.029067 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bea1d532-5241-4968-84c8-badda78b7e79" (UID: "bea1d532-5241-4968-84c8-badda78b7e79"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.034636 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-config" (OuterVolumeSpecName: "config") pod "bea1d532-5241-4968-84c8-badda78b7e79" (UID: "bea1d532-5241-4968-84c8-badda78b7e79"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.037518 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bea1d532-5241-4968-84c8-badda78b7e79" (UID: "bea1d532-5241-4968-84c8-badda78b7e79"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.038021 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bea1d532-5241-4968-84c8-badda78b7e79" (UID: "bea1d532-5241-4968-84c8-badda78b7e79"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.044025 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.044063 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.044077 4822 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.044089 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.044103 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbdk9\" (UniqueName: \"kubernetes.io/projected/bea1d532-5241-4968-84c8-badda78b7e79-kube-api-access-zbdk9\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.044116 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bea1d532-5241-4968-84c8-badda78b7e79-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.825053 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5594d9b959-d9p82" Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.858380 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5594d9b959-d9p82"] Dec 01 07:13:37 crc kubenswrapper[4822]: I1201 07:13:37.866472 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5594d9b959-d9p82"] Dec 01 07:13:38 crc kubenswrapper[4822]: I1201 07:13:38.979866 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bea1d532-5241-4968-84c8-badda78b7e79" path="/var/lib/kubelet/pods/bea1d532-5241-4968-84c8-badda78b7e79/volumes" Dec 01 07:13:40 crc kubenswrapper[4822]: I1201 07:13:40.871334 4822 generic.go:334] "Generic (PLEG): container finished" podID="be029423-e53b-476b-88ef-99ad1623a8c4" containerID="b8722356572623cc5fc34be9c971b4dcd146825fe883537693297932f5b2745f" exitCode=0 Dec 01 07:13:40 crc kubenswrapper[4822]: I1201 07:13:40.871526 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-jv7nr" event={"ID":"be029423-e53b-476b-88ef-99ad1623a8c4","Type":"ContainerDied","Data":"b8722356572623cc5fc34be9c971b4dcd146825fe883537693297932f5b2745f"} Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.328119 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.451070 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-scripts\") pod \"be029423-e53b-476b-88ef-99ad1623a8c4\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.451228 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-combined-ca-bundle\") pod \"be029423-e53b-476b-88ef-99ad1623a8c4\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.451299 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-config-data\") pod \"be029423-e53b-476b-88ef-99ad1623a8c4\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.451331 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cx4m5\" (UniqueName: \"kubernetes.io/projected/be029423-e53b-476b-88ef-99ad1623a8c4-kube-api-access-cx4m5\") pod \"be029423-e53b-476b-88ef-99ad1623a8c4\" (UID: \"be029423-e53b-476b-88ef-99ad1623a8c4\") " Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.467933 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-scripts" (OuterVolumeSpecName: "scripts") pod "be029423-e53b-476b-88ef-99ad1623a8c4" (UID: "be029423-e53b-476b-88ef-99ad1623a8c4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.467986 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be029423-e53b-476b-88ef-99ad1623a8c4-kube-api-access-cx4m5" (OuterVolumeSpecName: "kube-api-access-cx4m5") pod "be029423-e53b-476b-88ef-99ad1623a8c4" (UID: "be029423-e53b-476b-88ef-99ad1623a8c4"). InnerVolumeSpecName "kube-api-access-cx4m5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.488835 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-config-data" (OuterVolumeSpecName: "config-data") pod "be029423-e53b-476b-88ef-99ad1623a8c4" (UID: "be029423-e53b-476b-88ef-99ad1623a8c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.492128 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be029423-e53b-476b-88ef-99ad1623a8c4" (UID: "be029423-e53b-476b-88ef-99ad1623a8c4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.553845 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.553910 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.553931 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be029423-e53b-476b-88ef-99ad1623a8c4-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.553952 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cx4m5\" (UniqueName: \"kubernetes.io/projected/be029423-e53b-476b-88ef-99ad1623a8c4-kube-api-access-cx4m5\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.891230 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-jv7nr" event={"ID":"be029423-e53b-476b-88ef-99ad1623a8c4","Type":"ContainerDied","Data":"ed40e73d2df9e69ef4c1c863dbdc4a5f8b3d7ddfbf96b775e326af4653d915c4"} Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.891281 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed40e73d2df9e69ef4c1c863dbdc4a5f8b3d7ddfbf96b775e326af4653d915c4" Dec 01 07:13:42 crc kubenswrapper[4822]: I1201 07:13:42.891322 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-jv7nr" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.097850 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.098113 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" containerName="nova-api-log" containerID="cri-o://78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc" gracePeriod=30 Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.098243 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" containerName="nova-api-api" containerID="cri-o://460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544" gracePeriod=30 Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.150781 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.151507 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a" containerName="nova-scheduler-scheduler" containerID="cri-o://42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58" gracePeriod=30 Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.184984 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.185503 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="067b1287-cccb-4ca9-b817-f49673c0e17a" 
containerName="nova-metadata-log" containerID="cri-o://25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630" gracePeriod=30 Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.185681 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="067b1287-cccb-4ca9-b817-f49673c0e17a" containerName="nova-metadata-metadata" containerID="cri-o://da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453" gracePeriod=30 Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.693968 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.875620 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-public-tls-certs\") pod \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.875718 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-internal-tls-certs\") pod \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.875763 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-config-data\") pod \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.875882 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9vch\" (UniqueName: \"kubernetes.io/projected/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-kube-api-access-g9vch\") pod \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.875939 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-combined-ca-bundle\") pod \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.876028 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-logs\") pod \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\" (UID: \"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc\") " Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.876977 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-logs" (OuterVolumeSpecName: "logs") pod "a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" (UID: "a3a3625d-4e93-4b9b-bedf-a97a58ecfecc"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.897818 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-kube-api-access-g9vch" (OuterVolumeSpecName: "kube-api-access-g9vch") pod "a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" (UID: "a3a3625d-4e93-4b9b-bedf-a97a58ecfecc"). InnerVolumeSpecName "kube-api-access-g9vch". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.908243 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-config-data" (OuterVolumeSpecName: "config-data") pod "a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" (UID: "a3a3625d-4e93-4b9b-bedf-a97a58ecfecc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.915310 4822 generic.go:334] "Generic (PLEG): container finished" podID="067b1287-cccb-4ca9-b817-f49673c0e17a" containerID="25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630" exitCode=143 Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.915402 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"067b1287-cccb-4ca9-b817-f49673c0e17a","Type":"ContainerDied","Data":"25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630"} Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.919825 4822 generic.go:334] "Generic (PLEG): container finished" podID="a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" containerID="460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544" exitCode=0 Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.920207 4822 generic.go:334] "Generic (PLEG): container finished" podID="a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" containerID="78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc" exitCode=143 Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.919963 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc","Type":"ContainerDied","Data":"460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544"} Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.919918 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.920255 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc","Type":"ContainerDied","Data":"78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc"} Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.920271 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a3a3625d-4e93-4b9b-bedf-a97a58ecfecc","Type":"ContainerDied","Data":"ca5664612596e8d1402be585c7acfecdf10de9d1655e865a70d0f50ee640bbfc"} Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.920290 4822 scope.go:117] "RemoveContainer" containerID="460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.925211 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" (UID: "a3a3625d-4e93-4b9b-bedf-a97a58ecfecc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.964937 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" (UID: "a3a3625d-4e93-4b9b-bedf-a97a58ecfecc"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.980371 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.980673 4822 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.980808 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.980919 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9vch\" (UniqueName: \"kubernetes.io/projected/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-kube-api-access-g9vch\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.981027 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:43 crc kubenswrapper[4822]: I1201 07:13:43.989380 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" (UID: "a3a3625d-4e93-4b9b-bedf-a97a58ecfecc"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.052331 4822 scope.go:117] "RemoveContainer" containerID="78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.077636 4822 scope.go:117] "RemoveContainer" containerID="460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544" Dec 01 07:13:44 crc kubenswrapper[4822]: E1201 07:13:44.078162 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544\": container with ID starting with 460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544 not found: ID does not exist" containerID="460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.078221 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544"} err="failed to get container status \"460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544\": rpc error: code = NotFound desc = could not find container \"460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544\": container with ID starting with 460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544 not found: ID does not exist" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.078247 4822 scope.go:117] "RemoveContainer" containerID="78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc" Dec 01 07:13:44 crc kubenswrapper[4822]: E1201 07:13:44.078717 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc\": container with ID starting with 78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc not found: ID does not exist" containerID="78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.078748 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc"} err="failed to get container status \"78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc\": rpc error: code = NotFound desc = could not find container \"78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc\": container with ID starting with 78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc not found: ID does not exist" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.078762 4822 scope.go:117] "RemoveContainer" containerID="460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.079169 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544"} err="failed to get container status \"460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544\": rpc error: code = NotFound desc = could not find container \"460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544\": container with ID starting with 460569eab85bd19d5c8829934fe1134fba5abb84a2d19c344a0c63983c6d4544 not found: ID does not exist" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.079310 4822 
scope.go:117] "RemoveContainer" containerID="78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.079687 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc"} err="failed to get container status \"78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc\": rpc error: code = NotFound desc = could not find container \"78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc\": container with ID starting with 78cad36f2f5886855b9dd140be00c7852a6f4a431448b5fcb5bfd983966a89fc not found: ID does not exist" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.083395 4822 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.252398 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.265488 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.281311 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 07:13:44 crc kubenswrapper[4822]: E1201 07:13:44.281799 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bea1d532-5241-4968-84c8-badda78b7e79" containerName="init" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.281815 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="bea1d532-5241-4968-84c8-badda78b7e79" containerName="init" Dec 01 07:13:44 crc kubenswrapper[4822]: E1201 07:13:44.281827 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" containerName="nova-api-api" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.281836 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" containerName="nova-api-api" Dec 01 07:13:44 crc kubenswrapper[4822]: E1201 07:13:44.281847 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bea1d532-5241-4968-84c8-badda78b7e79" containerName="dnsmasq-dns" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.281855 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="bea1d532-5241-4968-84c8-badda78b7e79" containerName="dnsmasq-dns" Dec 01 07:13:44 crc kubenswrapper[4822]: E1201 07:13:44.281875 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" containerName="nova-api-log" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.281885 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" containerName="nova-api-log" Dec 01 07:13:44 crc kubenswrapper[4822]: E1201 07:13:44.281900 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be029423-e53b-476b-88ef-99ad1623a8c4" containerName="nova-manage" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.281909 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="be029423-e53b-476b-88ef-99ad1623a8c4" containerName="nova-manage" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.282140 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="bea1d532-5241-4968-84c8-badda78b7e79" containerName="dnsmasq-dns" Dec 01 
07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.282161 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="be029423-e53b-476b-88ef-99ad1623a8c4" containerName="nova-manage" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.282185 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" containerName="nova-api-api" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.282197 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" containerName="nova-api-log" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.283393 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.289039 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.289779 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.289892 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.295059 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.296627 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-public-tls-certs\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.296663 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-config-data\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.296719 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.296760 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3076b4c6-b401-48a5-8343-a34d9c979ea3-logs\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.296815 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.296835 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zg52x\" (UniqueName: \"kubernetes.io/projected/3076b4c6-b401-48a5-8343-a34d9c979ea3-kube-api-access-zg52x\") pod \"nova-api-0\" 
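Before admitting the replacement nova-api-0, the CPU and memory managers drop per-container resource-allocation state left over from the pods just removed; the RemoveStaleState lines are cleanup, not failures, even though cpu_manager emits them at E level. A sketch that groups the cleaned containers by the pod UID they belonged to (Python assumed; "kubelet.log" is a hypothetical local filename):

import re
from collections import defaultdict

# Sketch: group cpu/memory-manager stale-state removals by the pod UID
# they belonged to. "kubelet.log" is a hypothetical local filename.
stale = re.compile(r'RemoveStaleState.*?podUID="([0-9a-f-]+)" containerName="([^"]+)"')

by_pod = defaultdict(set)
with open("kubelet.log") as fh:
    for line in fh:
        for uid, container in stale.findall(line):
            by_pod[uid].add(container)

for uid, containers in by_pod.items():
    print(uid, sorted(containers))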
(UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.398109 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-public-tls-certs\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.398365 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-config-data\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.398508 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.398654 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3076b4c6-b401-48a5-8343-a34d9c979ea3-logs\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.398811 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.398947 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zg52x\" (UniqueName: \"kubernetes.io/projected/3076b4c6-b401-48a5-8343-a34d9c979ea3-kube-api-access-zg52x\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.399025 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3076b4c6-b401-48a5-8343-a34d9c979ea3-logs\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.402381 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-config-data\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.402581 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.404393 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-public-tls-certs\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 
07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.406538 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.417084 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zg52x\" (UniqueName: \"kubernetes.io/projected/3076b4c6-b401-48a5-8343-a34d9c979ea3-kube-api-access-zg52x\") pod \"nova-api-0\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.689103 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.783016 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.818420 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-config-data\") pod \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\" (UID: \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\") " Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.818647 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbg26\" (UniqueName: \"kubernetes.io/projected/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-kube-api-access-nbg26\") pod \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\" (UID: \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\") " Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.818931 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-combined-ca-bundle\") pod \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\" (UID: \"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a\") " Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.838212 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-kube-api-access-nbg26" (OuterVolumeSpecName: "kube-api-access-nbg26") pod "04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a" (UID: "04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a"). InnerVolumeSpecName "kube-api-access-nbg26". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.855712 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a" (UID: "04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a"). InnerVolumeSpecName "combined-ca-bundle". 
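Each volume of the replacement pod walks the reconciler pipeline visible above: operationExecutor.VerifyControllerAttachedVolume, then MountVolume started, then MountVolume.SetUp succeeded. Tracking the last phase seen per volume is a quick way to spot a mount that never completes. A sketch (Python assumed; "kubelet.log" is a hypothetical local filename):

import re

# Sketch: remember the most recent reconciler phase seen per volume, so a
# volume stuck before "MountVolume.SetUp succeeded" stands out.
# "kubelet.log" is a hypothetical local filename.
phase_re = re.compile(r'(VerifyControllerAttachedVolume started|MountVolume started|'
                      r'MountVolume\.SetUp succeeded) for volume \\"([^"\\]+)\\"')

last_phase = {}
with open("kubelet.log") as fh:
    for line in fh:
        for phase, name in phase_re.findall(line):
            last_phase[name] = phase

for name in sorted(last_phase):
    print(f"{name}: {last_phase[name]}")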
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.937097 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.937127 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbg26\" (UniqueName: \"kubernetes.io/projected/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-kube-api-access-nbg26\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:44 crc kubenswrapper[4822]: I1201 07:13:44.937283 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-config-data" (OuterVolumeSpecName: "config-data") pod "04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a" (UID: "04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.002837 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3a3625d-4e93-4b9b-bedf-a97a58ecfecc" path="/var/lib/kubelet/pods/a3a3625d-4e93-4b9b-bedf-a97a58ecfecc/volumes" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.010989 4822 generic.go:334] "Generic (PLEG): container finished" podID="04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a" containerID="42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58" exitCode=0 Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.011070 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a","Type":"ContainerDied","Data":"42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58"} Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.011095 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a","Type":"ContainerDied","Data":"2624602a1cf742b117dc24afe7b2fe0462ef863e497e4c3648eaf714effe1840"} Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.011112 4822 scope.go:117] "RemoveContainer" containerID="42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.011204 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.039898 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.085743 4822 scope.go:117] "RemoveContainer" containerID="42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58" Dec 01 07:13:45 crc kubenswrapper[4822]: E1201 07:13:45.086806 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58\": container with ID starting with 42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58 not found: ID does not exist" containerID="42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.086838 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58"} err="failed to get container status \"42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58\": rpc error: code = NotFound desc = could not find container \"42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58\": container with ID starting with 42d2d4fceebf1e48f8eb5a0f171b9c9e80da806e57b7e1333fb02a6a90965e58 not found: ID does not exist" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.116059 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.126043 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.136730 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:13:45 crc kubenswrapper[4822]: E1201 07:13:45.137170 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a" containerName="nova-scheduler-scheduler" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.137189 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a" containerName="nova-scheduler-scheduler" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.137361 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a" containerName="nova-scheduler-scheduler" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.138010 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.144919 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.151426 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.242619 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m65p5\" (UniqueName: \"kubernetes.io/projected/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-kube-api-access-m65p5\") pod \"nova-scheduler-0\" (UID: \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\") " pod="openstack/nova-scheduler-0" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.243033 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\") " pod="openstack/nova-scheduler-0" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.243308 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-config-data\") pod \"nova-scheduler-0\" (UID: \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\") " pod="openstack/nova-scheduler-0" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.288970 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.345434 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m65p5\" (UniqueName: \"kubernetes.io/projected/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-kube-api-access-m65p5\") pod \"nova-scheduler-0\" (UID: \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\") " pod="openstack/nova-scheduler-0" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.345624 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\") " pod="openstack/nova-scheduler-0" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.345701 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-config-data\") pod \"nova-scheduler-0\" (UID: \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\") " pod="openstack/nova-scheduler-0" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.351633 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\") " pod="openstack/nova-scheduler-0" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.353089 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-config-data\") pod \"nova-scheduler-0\" (UID: \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\") " pod="openstack/nova-scheduler-0" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 
07:13:45.373290 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m65p5\" (UniqueName: \"kubernetes.io/projected/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-kube-api-access-m65p5\") pod \"nova-scheduler-0\" (UID: \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\") " pod="openstack/nova-scheduler-0" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.458290 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 07:13:45 crc kubenswrapper[4822]: I1201 07:13:45.980140 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.057932 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3076b4c6-b401-48a5-8343-a34d9c979ea3","Type":"ContainerStarted","Data":"2737b375be318a62945c13daa6a442680918aa2addf2fb31b4a1be4501d00b4c"} Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.057985 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3076b4c6-b401-48a5-8343-a34d9c979ea3","Type":"ContainerStarted","Data":"99c5094f6036c81b6e4b694863a462ef98197e4530f7c0d2c8e4bc0a7902f727"} Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.058003 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3076b4c6-b401-48a5-8343-a34d9c979ea3","Type":"ContainerStarted","Data":"a326356410cb4bde6ccc7c21c0c89785aa237faedd662b8a1d70028efd4dfba9"} Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.060596 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210","Type":"ContainerStarted","Data":"2252be5f1e0cdc71190d795f6089aab7203918d3e5efadcdcd559b4d1ba84f22"} Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.831155 4822 util.go:48] "No ready sandbox for pod can be found. 
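The ContainerStarted records come from the PLEG (pod lifecycle event generator) relist, the same source as the earlier ContainerDied records; replaying them per pod reconstructs each delete-and-recreate cycle end to end. A sketch (Python assumed; "kubelet.log" is a hypothetical local filename):

import re
from collections import defaultdict

# Sketch: rebuild a per-pod timeline of PLEG events (ContainerStarted /
# ContainerDied) to follow each delete-and-recreate cycle.
# "kubelet.log" is a hypothetical local filename.
evt_re = re.compile(r'event for pod" pod="([^"]+)" '
                    r'event={"ID":"[^"]+","Type":"([^"]+)","Data":"([0-9a-f]+)"}')

timeline = defaultdict(list)
with open("kubelet.log") as fh:
    for line in fh:
        stamp = line[:15]  # journald prefix, e.g. "Dec 01 07:13:46"
        for pod, kind, data in evt_re.findall(line):
            timeline[pod].append((stamp, kind, data[:12]))

for pod, events in timeline.items():
    print(pod)
    for stamp, kind, cid in events:
        print(f"  {stamp} {kind} {cid}")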
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.874489 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.8744643 podStartE2EDuration="2.8744643s" podCreationTimestamp="2025-12-01 07:13:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:13:46.079415333 +0000 UTC m=+1381.400223029" watchObservedRunningTime="2025-12-01 07:13:46.8744643 +0000 UTC m=+1382.195272026" Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.888596 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-combined-ca-bundle\") pod \"067b1287-cccb-4ca9-b817-f49673c0e17a\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.888835 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fxbl\" (UniqueName: \"kubernetes.io/projected/067b1287-cccb-4ca9-b817-f49673c0e17a-kube-api-access-7fxbl\") pod \"067b1287-cccb-4ca9-b817-f49673c0e17a\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.889120 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/067b1287-cccb-4ca9-b817-f49673c0e17a-logs\") pod \"067b1287-cccb-4ca9-b817-f49673c0e17a\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.889177 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-config-data\") pod \"067b1287-cccb-4ca9-b817-f49673c0e17a\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.889234 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-nova-metadata-tls-certs\") pod \"067b1287-cccb-4ca9-b817-f49673c0e17a\" (UID: \"067b1287-cccb-4ca9-b817-f49673c0e17a\") " Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.889971 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/067b1287-cccb-4ca9-b817-f49673c0e17a-logs" (OuterVolumeSpecName: "logs") pod "067b1287-cccb-4ca9-b817-f49673c0e17a" (UID: "067b1287-cccb-4ca9-b817-f49673c0e17a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.890047 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/067b1287-cccb-4ca9-b817-f49673c0e17a-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.896896 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/067b1287-cccb-4ca9-b817-f49673c0e17a-kube-api-access-7fxbl" (OuterVolumeSpecName: "kube-api-access-7fxbl") pod "067b1287-cccb-4ca9-b817-f49673c0e17a" (UID: "067b1287-cccb-4ca9-b817-f49673c0e17a"). InnerVolumeSpecName "kube-api-access-7fxbl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.931385 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "067b1287-cccb-4ca9-b817-f49673c0e17a" (UID: "067b1287-cccb-4ca9-b817-f49673c0e17a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.932038 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-config-data" (OuterVolumeSpecName: "config-data") pod "067b1287-cccb-4ca9-b817-f49673c0e17a" (UID: "067b1287-cccb-4ca9-b817-f49673c0e17a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.953219 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "067b1287-cccb-4ca9-b817-f49673c0e17a" (UID: "067b1287-cccb-4ca9-b817-f49673c0e17a"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.965784 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a" path="/var/lib/kubelet/pods/04a71e46-fd9e-4a81-a06a-6e6ca2e84a3a/volumes" Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.991691 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.991733 4822 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.991746 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/067b1287-cccb-4ca9-b817-f49673c0e17a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:46 crc kubenswrapper[4822]: I1201 07:13:46.991760 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fxbl\" (UniqueName: \"kubernetes.io/projected/067b1287-cccb-4ca9-b817-f49673c0e17a-kube-api-access-7fxbl\") on node \"crc\" DevicePath \"\"" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.072170 4822 generic.go:334] "Generic (PLEG): container finished" podID="067b1287-cccb-4ca9-b817-f49673c0e17a" containerID="da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453" exitCode=0 Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.072309 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"067b1287-cccb-4ca9-b817-f49673c0e17a","Type":"ContainerDied","Data":"da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453"} Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.072388 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"067b1287-cccb-4ca9-b817-f49673c0e17a","Type":"ContainerDied","Data":"114027931bb197dae4b83569799298355e9f3c32f35b73ad0a7ff8b2125dad52"} Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.072430 4822 scope.go:117] "RemoveContainer" containerID="da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.072644 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.074410 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210","Type":"ContainerStarted","Data":"e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5"} Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.094405 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.094389701 podStartE2EDuration="2.094389701s" podCreationTimestamp="2025-12-01 07:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:13:47.088015724 +0000 UTC m=+1382.408823410" watchObservedRunningTime="2025-12-01 07:13:47.094389701 +0000 UTC m=+1382.415197397" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.115897 4822 scope.go:117] "RemoveContainer" containerID="25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.119794 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.140627 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.152784 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:13:47 crc kubenswrapper[4822]: E1201 07:13:47.153709 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="067b1287-cccb-4ca9-b817-f49673c0e17a" containerName="nova-metadata-log" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.153755 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="067b1287-cccb-4ca9-b817-f49673c0e17a" containerName="nova-metadata-log" Dec 01 07:13:47 crc kubenswrapper[4822]: E1201 07:13:47.153787 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="067b1287-cccb-4ca9-b817-f49673c0e17a" containerName="nova-metadata-metadata" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.153805 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="067b1287-cccb-4ca9-b817-f49673c0e17a" containerName="nova-metadata-metadata" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.154349 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="067b1287-cccb-4ca9-b817-f49673c0e17a" containerName="nova-metadata-log" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.154438 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="067b1287-cccb-4ca9-b817-f49673c0e17a" containerName="nova-metadata-metadata" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.155686 4822 scope.go:117] "RemoveContainer" containerID="da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453" Dec 01 07:13:47 crc kubenswrapper[4822]: E1201 07:13:47.156190 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453\": container with ID starting with da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453 not found: ID does not exist" containerID="da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.156233 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453"} err="failed to get container status \"da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453\": rpc error: code = NotFound desc = could not find container \"da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453\": container with ID starting with da06dc85fc73ca0f249c2077cc4e7cf908f2d11d47d4852ced2f3d1ca7353453 not found: ID does not exist" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.156259 4822 scope.go:117] "RemoveContainer" containerID="25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.156528 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: E1201 07:13:47.156592 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630\": container with ID starting with 25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630 not found: ID does not exist" containerID="25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.156615 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630"} err="failed to get container status \"25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630\": rpc error: code = NotFound desc = could not find container \"25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630\": container with ID starting with 25420c34eb957f397e3ed5c301c81fbaf15b40ccf3fdc0cd475342048ee61630 not found: ID does not exist" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.162681 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.163695 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.164959 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.194539 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-config-data\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.194906 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " 
pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.195038 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b725dbb9-8785-43a1-9f35-215938938f6e-logs\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.195172 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzr9r\" (UniqueName: \"kubernetes.io/projected/b725dbb9-8785-43a1-9f35-215938938f6e-kube-api-access-bzr9r\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.195208 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.296383 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzr9r\" (UniqueName: \"kubernetes.io/projected/b725dbb9-8785-43a1-9f35-215938938f6e-kube-api-access-bzr9r\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.296435 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.296496 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-config-data\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.296571 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.296604 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b725dbb9-8785-43a1-9f35-215938938f6e-logs\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.297062 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b725dbb9-8785-43a1-9f35-215938938f6e-logs\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.301987 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.303799 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.305842 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-config-data\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.314474 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzr9r\" (UniqueName: \"kubernetes.io/projected/b725dbb9-8785-43a1-9f35-215938938f6e-kube-api-access-bzr9r\") pod \"nova-metadata-0\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " pod="openstack/nova-metadata-0" Dec 01 07:13:47 crc kubenswrapper[4822]: I1201 07:13:47.483100 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 07:13:48 crc kubenswrapper[4822]: I1201 07:13:48.018802 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:13:48 crc kubenswrapper[4822]: I1201 07:13:48.085687 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b725dbb9-8785-43a1-9f35-215938938f6e","Type":"ContainerStarted","Data":"3105a0c08ab94f69d3f6b5522d66449b5a15b292779f6d4bf38d75b99d86e6e1"} Dec 01 07:13:48 crc kubenswrapper[4822]: I1201 07:13:48.975482 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="067b1287-cccb-4ca9-b817-f49673c0e17a" path="/var/lib/kubelet/pods/067b1287-cccb-4ca9-b817-f49673c0e17a/volumes" Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.117691 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b725dbb9-8785-43a1-9f35-215938938f6e","Type":"ContainerStarted","Data":"53ee8c277f9abfc79fafeffabdd86dd7daf8f3652232dfbf0acccc1cc46201b6"} Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.117737 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b725dbb9-8785-43a1-9f35-215938938f6e","Type":"ContainerStarted","Data":"28abb98621c9dbea0f60ad4699e037dfb98ecedc14b6f3984d6224c5cbeecd9c"} Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.161162 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.161140045 podStartE2EDuration="2.161140045s" podCreationTimestamp="2025-12-01 07:13:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:13:49.161085344 +0000 UTC m=+1384.481893030" watchObservedRunningTime="2025-12-01 07:13:49.161140045 +0000 UTC m=+1384.481947731" Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.266528 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5cg4f"] Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.268522 4822 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.281345 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5cg4f"] Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.434950 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bl2z\" (UniqueName: \"kubernetes.io/projected/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-kube-api-access-2bl2z\") pod \"redhat-operators-5cg4f\" (UID: \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\") " pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.435034 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-utilities\") pod \"redhat-operators-5cg4f\" (UID: \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\") " pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.435753 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-catalog-content\") pod \"redhat-operators-5cg4f\" (UID: \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\") " pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.537843 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-utilities\") pod \"redhat-operators-5cg4f\" (UID: \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\") " pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.537992 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-catalog-content\") pod \"redhat-operators-5cg4f\" (UID: \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\") " pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.538075 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bl2z\" (UniqueName: \"kubernetes.io/projected/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-kube-api-access-2bl2z\") pod \"redhat-operators-5cg4f\" (UID: \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\") " pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.538376 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-utilities\") pod \"redhat-operators-5cg4f\" (UID: \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\") " pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.538482 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-catalog-content\") pod \"redhat-operators-5cg4f\" (UID: \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\") " pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.556424 4822 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-2bl2z\" (UniqueName: \"kubernetes.io/projected/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-kube-api-access-2bl2z\") pod \"redhat-operators-5cg4f\" (UID: \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\") " pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:49 crc kubenswrapper[4822]: I1201 07:13:49.607354 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:50 crc kubenswrapper[4822]: I1201 07:13:50.126362 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5cg4f"] Dec 01 07:13:50 crc kubenswrapper[4822]: I1201 07:13:50.459478 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 01 07:13:51 crc kubenswrapper[4822]: I1201 07:13:51.141412 4822 generic.go:334] "Generic (PLEG): container finished" podID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" containerID="62245bd9296d24bdd5e222359a5adb562ee43b6d4b454c3741fd1b5dbc85c5d2" exitCode=0 Dec 01 07:13:51 crc kubenswrapper[4822]: I1201 07:13:51.141464 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cg4f" event={"ID":"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba","Type":"ContainerDied","Data":"62245bd9296d24bdd5e222359a5adb562ee43b6d4b454c3741fd1b5dbc85c5d2"} Dec 01 07:13:51 crc kubenswrapper[4822]: I1201 07:13:51.141748 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cg4f" event={"ID":"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba","Type":"ContainerStarted","Data":"8c409aeb412d37f1293f1fb28154cad988a0e95427d9ae78a6b3d11d52567953"} Dec 01 07:13:52 crc kubenswrapper[4822]: I1201 07:13:52.154305 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cg4f" event={"ID":"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba","Type":"ContainerStarted","Data":"0ed2baef6441965b839bcbfb2ce212df9403cc84ecf51a296c6165961539c890"} Dec 01 07:13:52 crc kubenswrapper[4822]: I1201 07:13:52.483278 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 07:13:52 crc kubenswrapper[4822]: I1201 07:13:52.483591 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 07:13:54 crc kubenswrapper[4822]: I1201 07:13:54.183971 4822 generic.go:334] "Generic (PLEG): container finished" podID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" containerID="0ed2baef6441965b839bcbfb2ce212df9403cc84ecf51a296c6165961539c890" exitCode=0 Dec 01 07:13:54 crc kubenswrapper[4822]: I1201 07:13:54.184072 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cg4f" event={"ID":"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba","Type":"ContainerDied","Data":"0ed2baef6441965b839bcbfb2ce212df9403cc84ecf51a296c6165961539c890"} Dec 01 07:13:54 crc kubenswrapper[4822]: I1201 07:13:54.689520 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 07:13:54 crc kubenswrapper[4822]: I1201 07:13:54.690016 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 07:13:55 crc kubenswrapper[4822]: I1201 07:13:55.029475 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 01 
07:13:55 crc kubenswrapper[4822]: I1201 07:13:55.194262 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cg4f" event={"ID":"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba","Type":"ContainerStarted","Data":"3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4"} Dec 01 07:13:55 crc kubenswrapper[4822]: I1201 07:13:55.216363 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5cg4f" podStartSLOduration=2.470677172 podStartE2EDuration="6.216345951s" podCreationTimestamp="2025-12-01 07:13:49 +0000 UTC" firstStartedPulling="2025-12-01 07:13:51.143650286 +0000 UTC m=+1386.464457982" lastFinishedPulling="2025-12-01 07:13:54.889319075 +0000 UTC m=+1390.210126761" observedRunningTime="2025-12-01 07:13:55.215066855 +0000 UTC m=+1390.535874571" watchObservedRunningTime="2025-12-01 07:13:55.216345951 +0000 UTC m=+1390.537153637" Dec 01 07:13:55 crc kubenswrapper[4822]: I1201 07:13:55.459299 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 01 07:13:55 crc kubenswrapper[4822]: I1201 07:13:55.492132 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 01 07:13:55 crc kubenswrapper[4822]: I1201 07:13:55.704846 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3076b4c6-b401-48a5-8343-a34d9c979ea3" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 07:13:55 crc kubenswrapper[4822]: I1201 07:13:55.704888 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3076b4c6-b401-48a5-8343-a34d9c979ea3" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 07:13:56 crc kubenswrapper[4822]: I1201 07:13:56.277340 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 01 07:13:57 crc kubenswrapper[4822]: I1201 07:13:57.484141 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 07:13:57 crc kubenswrapper[4822]: I1201 07:13:57.484195 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 07:13:58 crc kubenswrapper[4822]: I1201 07:13:58.497802 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 07:13:58 crc kubenswrapper[4822]: I1201 07:13:58.498300 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 07:13:59 crc kubenswrapper[4822]: I1201 07:13:59.607823 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:59 crc kubenswrapper[4822]: I1201 07:13:59.609016 4822 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:13:59 crc kubenswrapper[4822]: E1201 07:13:59.818080 4822 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04a71e46_fd9e_4a81_a06a_6e6ca2e84a3a.slice/crio-2624602a1cf742b117dc24afe7b2fe0462ef863e497e4c3648eaf714effe1840: Error finding container 2624602a1cf742b117dc24afe7b2fe0462ef863e497e4c3648eaf714effe1840: Status 404 returned error can't find the container with id 2624602a1cf742b117dc24afe7b2fe0462ef863e497e4c3648eaf714effe1840 Dec 01 07:14:00 crc kubenswrapper[4822]: E1201 07:14:00.123700 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe89f46f_916f_49c6_a29c_adac02673b60.slice/crio-conmon-7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe89f46f_916f_49c6_a29c_adac02673b60.slice/crio-7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04a71e46_fd9e_4a81_a06a_6e6ca2e84a3a.slice\": RecentStats: unable to find data in memory cache]" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.235631 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.268970 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be89f46f-916f-49c6-a29c-adac02673b60-run-httpd\") pod \"be89f46f-916f-49c6-a29c-adac02673b60\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.269093 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4db6\" (UniqueName: \"kubernetes.io/projected/be89f46f-916f-49c6-a29c-adac02673b60-kube-api-access-x4db6\") pod \"be89f46f-916f-49c6-a29c-adac02673b60\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.269216 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-combined-ca-bundle\") pod \"be89f46f-916f-49c6-a29c-adac02673b60\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.269267 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-scripts\") pod \"be89f46f-916f-49c6-a29c-adac02673b60\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.269292 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be89f46f-916f-49c6-a29c-adac02673b60-log-httpd\") pod \"be89f46f-916f-49c6-a29c-adac02673b60\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.269396 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-sg-core-conf-yaml\") pod \"be89f46f-916f-49c6-a29c-adac02673b60\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.269467 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-config-data\") pod \"be89f46f-916f-49c6-a29c-adac02673b60\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.269580 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-ceilometer-tls-certs\") pod \"be89f46f-916f-49c6-a29c-adac02673b60\" (UID: \"be89f46f-916f-49c6-a29c-adac02673b60\") " Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.270793 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be89f46f-916f-49c6-a29c-adac02673b60-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "be89f46f-916f-49c6-a29c-adac02673b60" (UID: "be89f46f-916f-49c6-a29c-adac02673b60"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.271330 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be89f46f-916f-49c6-a29c-adac02673b60-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "be89f46f-916f-49c6-a29c-adac02673b60" (UID: "be89f46f-916f-49c6-a29c-adac02673b60"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.284787 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-scripts" (OuterVolumeSpecName: "scripts") pod "be89f46f-916f-49c6-a29c-adac02673b60" (UID: "be89f46f-916f-49c6-a29c-adac02673b60"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.295013 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be89f46f-916f-49c6-a29c-adac02673b60-kube-api-access-x4db6" (OuterVolumeSpecName: "kube-api-access-x4db6") pod "be89f46f-916f-49c6-a29c-adac02673b60" (UID: "be89f46f-916f-49c6-a29c-adac02673b60"). InnerVolumeSpecName "kube-api-access-x4db6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.332988 4822 generic.go:334] "Generic (PLEG): container finished" podID="be89f46f-916f-49c6-a29c-adac02673b60" containerID="7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56" exitCode=137 Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.333085 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be89f46f-916f-49c6-a29c-adac02673b60","Type":"ContainerDied","Data":"7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56"} Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.333697 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be89f46f-916f-49c6-a29c-adac02673b60","Type":"ContainerDied","Data":"38002bf7fffa35889ed282ea5b1dc514ad52eda41464a477cdb3a12ae7b711dc"} Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.333161 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.333736 4822 scope.go:117] "RemoveContainer" containerID="7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.345018 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "be89f46f-916f-49c6-a29c-adac02673b60" (UID: "be89f46f-916f-49c6-a29c-adac02673b60"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.346760 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "be89f46f-916f-49c6-a29c-adac02673b60" (UID: "be89f46f-916f-49c6-a29c-adac02673b60"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.372345 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.372545 4822 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be89f46f-916f-49c6-a29c-adac02673b60-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.372678 4822 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.372756 4822 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.372838 4822 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be89f46f-916f-49c6-a29c-adac02673b60-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.372938 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4db6\" (UniqueName: \"kubernetes.io/projected/be89f46f-916f-49c6-a29c-adac02673b60-kube-api-access-x4db6\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.386155 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-config-data" (OuterVolumeSpecName: "config-data") pod "be89f46f-916f-49c6-a29c-adac02673b60" (UID: "be89f46f-916f-49c6-a29c-adac02673b60"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.387736 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be89f46f-916f-49c6-a29c-adac02673b60" (UID: "be89f46f-916f-49c6-a29c-adac02673b60"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.398669 4822 scope.go:117] "RemoveContainer" containerID="eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.423423 4822 scope.go:117] "RemoveContainer" containerID="16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.448909 4822 scope.go:117] "RemoveContainer" containerID="aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.474516 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.474781 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be89f46f-916f-49c6-a29c-adac02673b60-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.487791 4822 scope.go:117] "RemoveContainer" containerID="7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56" Dec 01 07:14:00 crc kubenswrapper[4822]: E1201 07:14:00.488671 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56\": container with ID starting with 7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56 not found: ID does not exist" containerID="7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.488720 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56"} err="failed to get container status \"7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56\": rpc error: code = NotFound desc = could not find container \"7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56\": container with ID starting with 7627fa124b114500b58c4bb50c4e8378d4d3a6cc8e7b3e3fc042c868c5331f56 not found: ID does not exist" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.488751 4822 scope.go:117] "RemoveContainer" containerID="eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd" Dec 01 07:14:00 crc kubenswrapper[4822]: E1201 07:14:00.489297 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd\": container with ID starting with eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd not found: ID does not exist" containerID="eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.489331 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd"} err="failed to get container status \"eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd\": rpc error: code = NotFound desc = could not find container \"eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd\": container with ID starting with 
eb332659d8763abcf1d6404f341c5130bb26da4059d751a99a8af4f127d02fcd not found: ID does not exist" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.489372 4822 scope.go:117] "RemoveContainer" containerID="16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402" Dec 01 07:14:00 crc kubenswrapper[4822]: E1201 07:14:00.490872 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402\": container with ID starting with 16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402 not found: ID does not exist" containerID="16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.490898 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402"} err="failed to get container status \"16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402\": rpc error: code = NotFound desc = could not find container \"16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402\": container with ID starting with 16a2801aa718edd7011ee189f7c19c61422810ecc8fd0ad1a7295e9061916402 not found: ID does not exist" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.490930 4822 scope.go:117] "RemoveContainer" containerID="aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9" Dec 01 07:14:00 crc kubenswrapper[4822]: E1201 07:14:00.491160 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9\": container with ID starting with aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9 not found: ID does not exist" containerID="aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.491193 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9"} err="failed to get container status \"aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9\": rpc error: code = NotFound desc = could not find container \"aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9\": container with ID starting with aa4f82abd5c57335d4e77a1214914025cfd90a1a9de3aa91a083fea0a0b368e9 not found: ID does not exist" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.670690 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.674802 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5cg4f" podUID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" containerName="registry-server" probeResult="failure" output=< Dec 01 07:14:00 crc kubenswrapper[4822]: timeout: failed to connect service ":50051" within 1s Dec 01 07:14:00 crc kubenswrapper[4822]: > Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.681838 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.701810 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:14:00 crc kubenswrapper[4822]: E1201 07:14:00.702192 4822 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="ceilometer-central-agent" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.702207 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="ceilometer-central-agent" Dec 01 07:14:00 crc kubenswrapper[4822]: E1201 07:14:00.702214 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="sg-core" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.702220 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="sg-core" Dec 01 07:14:00 crc kubenswrapper[4822]: E1201 07:14:00.702250 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="ceilometer-notification-agent" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.702257 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="ceilometer-notification-agent" Dec 01 07:14:00 crc kubenswrapper[4822]: E1201 07:14:00.702272 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="proxy-httpd" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.702277 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="proxy-httpd" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.702453 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="ceilometer-central-agent" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.702465 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="ceilometer-notification-agent" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.702477 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="sg-core" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.702489 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="be89f46f-916f-49c6-a29c-adac02673b60" containerName="proxy-httpd" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.711102 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.719356 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.719831 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.719959 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.757414 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.779354 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-config-data\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.779402 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kp42f\" (UniqueName: \"kubernetes.io/projected/f82bf765-7694-4ad6-8680-258c9e96cde0-kube-api-access-kp42f\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.779476 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-scripts\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.779510 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.779529 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f82bf765-7694-4ad6-8680-258c9e96cde0-run-httpd\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.779633 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.779654 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.779683 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/f82bf765-7694-4ad6-8680-258c9e96cde0-log-httpd\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.881498 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-config-data\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.881578 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kp42f\" (UniqueName: \"kubernetes.io/projected/f82bf765-7694-4ad6-8680-258c9e96cde0-kube-api-access-kp42f\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.881649 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-scripts\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.881681 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.881708 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f82bf765-7694-4ad6-8680-258c9e96cde0-run-httpd\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.881744 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.881775 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.881824 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f82bf765-7694-4ad6-8680-258c9e96cde0-log-httpd\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.882283 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f82bf765-7694-4ad6-8680-258c9e96cde0-log-httpd\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.882457 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/f82bf765-7694-4ad6-8680-258c9e96cde0-run-httpd\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.887484 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.887686 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-config-data\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.887685 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.888074 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.893390 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-scripts\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.900108 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kp42f\" (UniqueName: \"kubernetes.io/projected/f82bf765-7694-4ad6-8680-258c9e96cde0-kube-api-access-kp42f\") pod \"ceilometer-0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " pod="openstack/ceilometer-0" Dec 01 07:14:00 crc kubenswrapper[4822]: I1201 07:14:00.961185 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be89f46f-916f-49c6-a29c-adac02673b60" path="/var/lib/kubelet/pods/be89f46f-916f-49c6-a29c-adac02673b60/volumes" Dec 01 07:14:01 crc kubenswrapper[4822]: I1201 07:14:01.035084 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:14:01 crc kubenswrapper[4822]: I1201 07:14:01.489253 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:14:01 crc kubenswrapper[4822]: W1201 07:14:01.494342 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf82bf765_7694_4ad6_8680_258c9e96cde0.slice/crio-6e476699400af4464aecc58bea0ba03ed71ad41942863dc57b72efa7230c28e3 WatchSource:0}: Error finding container 6e476699400af4464aecc58bea0ba03ed71ad41942863dc57b72efa7230c28e3: Status 404 returned error can't find the container with id 6e476699400af4464aecc58bea0ba03ed71ad41942863dc57b72efa7230c28e3 Dec 01 07:14:01 crc kubenswrapper[4822]: I1201 07:14:01.496687 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:14:02 crc kubenswrapper[4822]: I1201 07:14:02.368589 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f82bf765-7694-4ad6-8680-258c9e96cde0","Type":"ContainerStarted","Data":"232bfa3723a8e9ea8d1812e63175355599297eaec99462bd5a8dbce62dfd5a9f"} Dec 01 07:14:02 crc kubenswrapper[4822]: I1201 07:14:02.369727 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f82bf765-7694-4ad6-8680-258c9e96cde0","Type":"ContainerStarted","Data":"6e476699400af4464aecc58bea0ba03ed71ad41942863dc57b72efa7230c28e3"} Dec 01 07:14:03 crc kubenswrapper[4822]: I1201 07:14:03.385565 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f82bf765-7694-4ad6-8680-258c9e96cde0","Type":"ContainerStarted","Data":"95d7cb1b03cca33094490f26ae44fa9b54c6204f312055eaf24990307abf0a3d"} Dec 01 07:14:04 crc kubenswrapper[4822]: I1201 07:14:04.397446 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f82bf765-7694-4ad6-8680-258c9e96cde0","Type":"ContainerStarted","Data":"6f031e1b4b2e282b4cd48d5d64210b11a8231bd55a4051a1a21da63d57205bab"} Dec 01 07:14:04 crc kubenswrapper[4822]: I1201 07:14:04.704628 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 07:14:04 crc kubenswrapper[4822]: I1201 07:14:04.705562 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 07:14:04 crc kubenswrapper[4822]: I1201 07:14:04.712438 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 07:14:04 crc kubenswrapper[4822]: I1201 07:14:04.715361 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 07:14:05 crc kubenswrapper[4822]: I1201 07:14:05.411102 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f82bf765-7694-4ad6-8680-258c9e96cde0","Type":"ContainerStarted","Data":"ee62047f058f3d3b037c7b5c164a2486cf6fd7832ad234039229326d5da5cb6e"} Dec 01 07:14:05 crc kubenswrapper[4822]: I1201 07:14:05.411193 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 07:14:05 crc kubenswrapper[4822]: I1201 07:14:05.411619 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 07:14:05 crc kubenswrapper[4822]: I1201 07:14:05.438695 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 07:14:05 
crc kubenswrapper[4822]: I1201 07:14:05.450297 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.154953061 podStartE2EDuration="5.450273697s" podCreationTimestamp="2025-12-01 07:14:00 +0000 UTC" firstStartedPulling="2025-12-01 07:14:01.496410674 +0000 UTC m=+1396.817218350" lastFinishedPulling="2025-12-01 07:14:04.7917313 +0000 UTC m=+1400.112538986" observedRunningTime="2025-12-01 07:14:05.431312812 +0000 UTC m=+1400.752120498" watchObservedRunningTime="2025-12-01 07:14:05.450273697 +0000 UTC m=+1400.771081413" Dec 01 07:14:07 crc kubenswrapper[4822]: I1201 07:14:07.489875 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 07:14:07 crc kubenswrapper[4822]: I1201 07:14:07.490826 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 07:14:07 crc kubenswrapper[4822]: I1201 07:14:07.507064 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 07:14:07 crc kubenswrapper[4822]: I1201 07:14:07.507140 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 07:14:09 crc kubenswrapper[4822]: I1201 07:14:09.706228 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:14:09 crc kubenswrapper[4822]: I1201 07:14:09.787105 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:14:09 crc kubenswrapper[4822]: I1201 07:14:09.949850 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5cg4f"] Dec 01 07:14:10 crc kubenswrapper[4822]: E1201 07:14:10.392901 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04a71e46_fd9e_4a81_a06a_6e6ca2e84a3a.slice\": RecentStats: unable to find data in memory cache]" Dec 01 07:14:11 crc kubenswrapper[4822]: I1201 07:14:11.462867 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5cg4f" podUID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" containerName="registry-server" containerID="cri-o://3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4" gracePeriod=2 Dec 01 07:14:11 crc kubenswrapper[4822]: I1201 07:14:11.943698 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.032911 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-catalog-content\") pod \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\" (UID: \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\") " Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.032975 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-utilities\") pod \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\" (UID: \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\") " Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.033038 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bl2z\" (UniqueName: \"kubernetes.io/projected/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-kube-api-access-2bl2z\") pod \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\" (UID: \"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba\") " Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.034435 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-utilities" (OuterVolumeSpecName: "utilities") pod "8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" (UID: "8daf4a36-0e8a-4aba-8a49-b2e9b8454fba"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.043141 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-kube-api-access-2bl2z" (OuterVolumeSpecName: "kube-api-access-2bl2z") pod "8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" (UID: "8daf4a36-0e8a-4aba-8a49-b2e9b8454fba"). InnerVolumeSpecName "kube-api-access-2bl2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.135401 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.135440 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bl2z\" (UniqueName: \"kubernetes.io/projected/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-kube-api-access-2bl2z\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.170126 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" (UID: "8daf4a36-0e8a-4aba-8a49-b2e9b8454fba"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.237084 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.481956 4822 generic.go:334] "Generic (PLEG): container finished" podID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" containerID="3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4" exitCode=0 Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.482009 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cg4f" event={"ID":"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba","Type":"ContainerDied","Data":"3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4"} Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.482041 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cg4f" event={"ID":"8daf4a36-0e8a-4aba-8a49-b2e9b8454fba","Type":"ContainerDied","Data":"8c409aeb412d37f1293f1fb28154cad988a0e95427d9ae78a6b3d11d52567953"} Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.482075 4822 scope.go:117] "RemoveContainer" containerID="3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.482398 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5cg4f" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.528723 4822 scope.go:117] "RemoveContainer" containerID="0ed2baef6441965b839bcbfb2ce212df9403cc84ecf51a296c6165961539c890" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.551512 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5cg4f"] Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.565916 4822 scope.go:117] "RemoveContainer" containerID="62245bd9296d24bdd5e222359a5adb562ee43b6d4b454c3741fd1b5dbc85c5d2" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.570657 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5cg4f"] Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.624884 4822 scope.go:117] "RemoveContainer" containerID="3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4" Dec 01 07:14:12 crc kubenswrapper[4822]: E1201 07:14:12.625570 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4\": container with ID starting with 3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4 not found: ID does not exist" containerID="3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.625726 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4"} err="failed to get container status \"3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4\": rpc error: code = NotFound desc = could not find container \"3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4\": container with ID starting with 3bf0bccd627b14a9689c54f7d94199681be8f0a76c432f2881d2bb85b918d9d4 not found: ID does not exist" Dec 01 07:14:12 crc 
kubenswrapper[4822]: I1201 07:14:12.625770 4822 scope.go:117] "RemoveContainer" containerID="0ed2baef6441965b839bcbfb2ce212df9403cc84ecf51a296c6165961539c890" Dec 01 07:14:12 crc kubenswrapper[4822]: E1201 07:14:12.626240 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ed2baef6441965b839bcbfb2ce212df9403cc84ecf51a296c6165961539c890\": container with ID starting with 0ed2baef6441965b839bcbfb2ce212df9403cc84ecf51a296c6165961539c890 not found: ID does not exist" containerID="0ed2baef6441965b839bcbfb2ce212df9403cc84ecf51a296c6165961539c890" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.626280 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ed2baef6441965b839bcbfb2ce212df9403cc84ecf51a296c6165961539c890"} err="failed to get container status \"0ed2baef6441965b839bcbfb2ce212df9403cc84ecf51a296c6165961539c890\": rpc error: code = NotFound desc = could not find container \"0ed2baef6441965b839bcbfb2ce212df9403cc84ecf51a296c6165961539c890\": container with ID starting with 0ed2baef6441965b839bcbfb2ce212df9403cc84ecf51a296c6165961539c890 not found: ID does not exist" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.626308 4822 scope.go:117] "RemoveContainer" containerID="62245bd9296d24bdd5e222359a5adb562ee43b6d4b454c3741fd1b5dbc85c5d2" Dec 01 07:14:12 crc kubenswrapper[4822]: E1201 07:14:12.626763 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62245bd9296d24bdd5e222359a5adb562ee43b6d4b454c3741fd1b5dbc85c5d2\": container with ID starting with 62245bd9296d24bdd5e222359a5adb562ee43b6d4b454c3741fd1b5dbc85c5d2 not found: ID does not exist" containerID="62245bd9296d24bdd5e222359a5adb562ee43b6d4b454c3741fd1b5dbc85c5d2" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.626879 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62245bd9296d24bdd5e222359a5adb562ee43b6d4b454c3741fd1b5dbc85c5d2"} err="failed to get container status \"62245bd9296d24bdd5e222359a5adb562ee43b6d4b454c3741fd1b5dbc85c5d2\": rpc error: code = NotFound desc = could not find container \"62245bd9296d24bdd5e222359a5adb562ee43b6d4b454c3741fd1b5dbc85c5d2\": container with ID starting with 62245bd9296d24bdd5e222359a5adb562ee43b6d4b454c3741fd1b5dbc85c5d2 not found: ID does not exist" Dec 01 07:14:12 crc kubenswrapper[4822]: I1201 07:14:12.966768 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" path="/var/lib/kubelet/pods/8daf4a36-0e8a-4aba-8a49-b2e9b8454fba/volumes" Dec 01 07:14:20 crc kubenswrapper[4822]: E1201 07:14:20.656595 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04a71e46_fd9e_4a81_a06a_6e6ca2e84a3a.slice\": RecentStats: unable to find data in memory cache]" Dec 01 07:14:30 crc kubenswrapper[4822]: E1201 07:14:30.921615 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04a71e46_fd9e_4a81_a06a_6e6ca2e84a3a.slice\": RecentStats: unable to find data in memory cache]" Dec 01 07:14:31 crc kubenswrapper[4822]: I1201 07:14:31.047902 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 01 
07:14:41 crc kubenswrapper[4822]: E1201 07:14:41.169154 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04a71e46_fd9e_4a81_a06a_6e6ca2e84a3a.slice\": RecentStats: unable to find data in memory cache]" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.078403 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.079295 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="946e1406-8dc2-445d-9d5a-f801260d554b" containerName="cinder-scheduler" containerID="cri-o://adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060" gracePeriod=30 Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.079429 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="946e1406-8dc2-445d-9d5a-f801260d554b" containerName="probe" containerID="cri-o://619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd" gracePeriod=30 Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.278624 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.390204 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.390421 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" containerName="cinder-api-log" containerID="cri-o://c74bb3693663962c268382b1974734e4c228ed0e61b1129daf7f28351b85cae6" gracePeriod=30 Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.390657 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" containerName="cinder-api" containerID="cri-o://da2877c86b5adb431c13ff28bb62c50e650b56f0e6e01accc5c938c8b253a36c" gracePeriod=30 Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.443022 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.443209 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="92fc89df-ebe6-45e8-b0f3-4a4d20476ee4" containerName="openstackclient" containerID="cri-o://e048e44ad53b2aa10bcb223a22d1436e3fc429ff83707e605d3e5616b88398a4" gracePeriod=2 Dec 01 07:14:52 crc kubenswrapper[4822]: E1201 07:14:52.448227 4822 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 01 07:14:52 crc kubenswrapper[4822]: E1201 07:14:52.448279 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data podName:da2985c5-716e-43ad-b892-ea29d88fa639 nodeName:}" failed. No retries permitted until 2025-12-01 07:14:52.948263578 +0000 UTC m=+1448.269071264 (durationBeforeRetry 500ms). 
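
Each "Killing container with a grace period" entry above carries the kubelet's termination contract: SIGTERM first, SIGKILL once the grace period expires (30s for the cinder containers, only 2s for openstackclient). A minimal sketch of that escalation for a single child process; real termination goes through the CRI, so signalling an os.Process directly here is an illustrative simplification.

package main

import (
	"fmt"
	"os"
	"os/exec"
	"syscall"
	"time"
)

// terminate sends SIGTERM, waits up to grace, then escalates to SIGKILL —
// the same contract applied per container in the log (gracePeriod=30 for
// most services, 2 for openstackclient).
func terminate(p *os.Process, grace time.Duration) error {
	if err := p.Signal(syscall.SIGTERM); err != nil {
		return err
	}
	done := make(chan error, 1)
	go func() { _, err := p.Wait(); done <- err }() // Wait requires a child process
	select {
	case err := <-done:
		return err // exited within the grace period
	case <-time.After(grace):
		return p.Kill() // grace period expired: SIGKILL
	}
}

func main() {
	cmd := exec.Command("sleep", "60") // stand-in for a container process
	if err := cmd.Start(); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(terminate(cmd.Process, 2*time.Second))
}
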
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data") pod "rabbitmq-server-0" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639") : configmap "rabbitmq-config-data" not found Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.455730 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.652101 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance8845-account-delete-rb8lw"] Dec 01 07:14:52 crc kubenswrapper[4822]: E1201 07:14:52.653116 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" containerName="registry-server" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.653134 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" containerName="registry-server" Dec 01 07:14:52 crc kubenswrapper[4822]: E1201 07:14:52.653152 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92fc89df-ebe6-45e8-b0f3-4a4d20476ee4" containerName="openstackclient" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.653158 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="92fc89df-ebe6-45e8-b0f3-4a4d20476ee4" containerName="openstackclient" Dec 01 07:14:52 crc kubenswrapper[4822]: E1201 07:14:52.653167 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" containerName="extract-utilities" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.653173 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" containerName="extract-utilities" Dec 01 07:14:52 crc kubenswrapper[4822]: E1201 07:14:52.653219 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" containerName="extract-content" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.653225 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" containerName="extract-content" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.653583 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8daf4a36-0e8a-4aba-8a49-b2e9b8454fba" containerName="registry-server" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.653631 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="92fc89df-ebe6-45e8-b0f3-4a4d20476ee4" containerName="openstackclient" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.655693 4822 util.go:30] "No sandbox for pod can be found. 
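
The RemoveStaleState / "Deleted CPUSet assignment" burst above is the CPU and memory managers pruning per-container state left behind by pods that no longer exist (the deleted registry-server and openstackclient pods) before the new account-delete pod is admitted. A sketch of that pruning pass, under the assumption that assignments live in a simple in-memory map; the key type and field names are invented for illustration.

package main

import "fmt"

// key identifies one container's CPU-set assignment (invented shape).
type key struct{ podUID, container string }

// pruneStaleAssignments drops assignments whose pod is gone — the
// RemoveStaleState / "Deleted CPUSet assignment" pass in the log, run so
// stale pinnings do not leak into newly admitted pods.
func pruneStaleAssignments(assignments map[key][]int, livePods map[string]bool) {
	for k := range assignments {
		if !livePods[k.podUID] {
			fmt.Printf("Deleted CPUSet assignment podUID=%q container=%q\n", k.podUID, k.container)
			delete(assignments, k) // deleting during range is safe in Go
		}
	}
}

func main() {
	a := map[key][]int{
		{"8daf4a36", "registry-server"}: {2, 3},
	}
	pruneStaleAssignments(a, map[string]bool{}) // no live pods: prune all
}
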
Need to start a new one" pod="openstack/glance8845-account-delete-rb8lw" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.750637 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance8845-account-delete-rb8lw"] Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.764262 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0-operator-scripts\") pod \"glance8845-account-delete-rb8lw\" (UID: \"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0\") " pod="openstack/glance8845-account-delete-rb8lw" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.764479 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgbdk\" (UniqueName: \"kubernetes.io/projected/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0-kube-api-access-pgbdk\") pod \"glance8845-account-delete-rb8lw\" (UID: \"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0\") " pod="openstack/glance8845-account-delete-rb8lw" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.790625 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.825626 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.825891 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="fd2326e5-f8a2-47ca-8519-576caa1825c5" containerName="ovn-northd" containerID="cri-o://131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038" gracePeriod=30 Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.826304 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="fd2326e5-f8a2-47ca-8519-576caa1825c5" containerName="openstack-network-exporter" containerID="cri-o://d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5" gracePeriod=30 Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.839885 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.840238 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="735cd01e-931e-44b0-968d-ebe114278896" containerName="openstack-network-exporter" containerID="cri-o://fd4e18f76e0e6f5823923b10ab11728a84798dceeb11a8364e726a5f18115231" gracePeriod=300 Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.869666 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgbdk\" (UniqueName: \"kubernetes.io/projected/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0-kube-api-access-pgbdk\") pod \"glance8845-account-delete-rb8lw\" (UID: \"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0\") " pod="openstack/glance8845-account-delete-rb8lw" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.869760 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0-operator-scripts\") pod \"glance8845-account-delete-rb8lw\" (UID: \"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0\") " pod="openstack/glance8845-account-delete-rb8lw" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.870785 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0-operator-scripts\") pod \"glance8845-account-delete-rb8lw\" (UID: \"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0\") " pod="openstack/glance8845-account-delete-rb8lw" Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.941056 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-h9ttd"] Dec 01 07:14:52 crc kubenswrapper[4822]: I1201 07:14:52.966736 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgbdk\" (UniqueName: \"kubernetes.io/projected/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0-kube-api-access-pgbdk\") pod \"glance8845-account-delete-rb8lw\" (UID: \"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0\") " pod="openstack/glance8845-account-delete-rb8lw" Dec 01 07:14:52 crc kubenswrapper[4822]: E1201 07:14:52.976177 4822 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 01 07:14:52 crc kubenswrapper[4822]: E1201 07:14:52.976227 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data podName:da2985c5-716e-43ad-b892-ea29d88fa639 nodeName:}" failed. No retries permitted until 2025-12-01 07:14:53.976212988 +0000 UTC m=+1449.297020674 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data") pod "rabbitmq-server-0" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639") : configmap "rabbitmq-config-data" not found Dec 01 07:14:52 crc kubenswrapper[4822]: E1201 07:14:52.977495 4822 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 01 07:14:52 crc kubenswrapper[4822]: E1201 07:14:52.977521 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data podName:a1229c08-35a5-4f16-8334-f32bb9b852b6 nodeName:}" failed. No retries permitted until 2025-12-01 07:14:53.477512314 +0000 UTC m=+1448.798320000 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data") pod "rabbitmq-cell1-server-0" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6") : configmap "rabbitmq-cell1-config-data" not found Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.024350 4822 generic.go:334] "Generic (PLEG): container finished" podID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" containerID="c74bb3693663962c268382b1974734e4c228ed0e61b1129daf7f28351b85cae6" exitCode=143 Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.028838 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3","Type":"ContainerDied","Data":"c74bb3693663962c268382b1974734e4c228ed0e61b1129daf7f28351b85cae6"} Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.028895 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-h9ttd"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.028916 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.029351 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="13737e91-3ce4-4bcc-a605-d1591596c446" containerName="openstack-network-exporter" containerID="cri-o://399ace0799f2c11abaf11cd88fb9941adf1ed68dea1ffe69cd7f58d95ad171e4" gracePeriod=300 Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.068830 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican6e4d-account-delete-bz5tp"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.070328 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican6e4d-account-delete-bz5tp" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.072942 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance8845-account-delete-rb8lw" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.130094 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican6e4d-account-delete-bz5tp"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.169821 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-zndwr"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.193316 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb84b\" (UniqueName: \"kubernetes.io/projected/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-kube-api-access-tb84b\") pod \"barbican6e4d-account-delete-bz5tp\" (UID: \"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf\") " pod="openstack/barbican6e4d-account-delete-bz5tp" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.193584 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts\") pod \"barbican6e4d-account-delete-bz5tp\" (UID: \"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf\") " pod="openstack/barbican6e4d-account-delete-bz5tp" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.228596 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinderac9b-account-delete-lmq7r"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.229854 4822 util.go:30] "No sandbox for pod can be found. 
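
The VerifyControllerAttachedVolume → MountVolume → "MountVolume.SetUp succeeded" sequences above are one desired-state-versus-actual-state reconciliation pass per pod volume; a failure (like the missing rabbitmq configmaps) simply leaves the volume unmounted for the next pass. A minimal sketch of that reconcile shape, where mount is a hypothetical stand-in for MountVolume.SetUp and the volume names are taken from the glance8845 pod above.

package main

import "fmt"

// reconcile mounts everything in desired that is not yet in actual — the
// shape of the kubelet volume reconciler visible in the log. Failures are
// left for the next pass (with backoff) rather than aborting the loop.
func reconcile(desired []string, actual map[string]bool, mount func(string) error) {
	for _, vol := range desired {
		if actual[vol] {
			continue // already mounted
		}
		if err := mount(vol); err != nil {
			fmt.Printf("MountVolume.SetUp failed for %q: %v (will retry)\n", vol, err)
			continue
		}
		actual[vol] = true
		fmt.Printf("MountVolume.SetUp succeeded for %q\n", vol)
	}
}

func main() {
	actual := map[string]bool{}
	mount := func(string) error { return nil } // always succeeds in this sketch
	reconcile([]string{"operator-scripts", "kube-api-access-pgbdk"}, actual, mount)
}
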
Need to start a new one" pod="openstack/cinderac9b-account-delete-lmq7r" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.268101 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-zndwr"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.279109 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinderac9b-account-delete-lmq7r"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.288962 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement75d6-account-delete-46gbh"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.290273 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement75d6-account-delete-46gbh" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.291440 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="13737e91-3ce4-4bcc-a605-d1591596c446" containerName="ovsdbserver-nb" containerID="cri-o://47987ba4418d28d6c596deb0f84178af4b52de7645b18ebeecc2505db58b4d61" gracePeriod=300 Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.309062 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb84b\" (UniqueName: \"kubernetes.io/projected/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-kube-api-access-tb84b\") pod \"barbican6e4d-account-delete-bz5tp\" (UID: \"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf\") " pod="openstack/barbican6e4d-account-delete-bz5tp" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.309348 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4btb5\" (UniqueName: \"kubernetes.io/projected/33a2ee0e-4d60-46f9-9f2a-a094af634a64-kube-api-access-4btb5\") pod \"cinderac9b-account-delete-lmq7r\" (UID: \"33a2ee0e-4d60-46f9-9f2a-a094af634a64\") " pod="openstack/cinderac9b-account-delete-lmq7r" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.309395 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33a2ee0e-4d60-46f9-9f2a-a094af634a64-operator-scripts\") pod \"cinderac9b-account-delete-lmq7r\" (UID: \"33a2ee0e-4d60-46f9-9f2a-a094af634a64\") " pod="openstack/cinderac9b-account-delete-lmq7r" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.309462 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts\") pod \"barbican6e4d-account-delete-bz5tp\" (UID: \"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf\") " pod="openstack/barbican6e4d-account-delete-bz5tp" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.310216 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts\") pod \"barbican6e4d-account-delete-bz5tp\" (UID: \"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf\") " pod="openstack/barbican6e4d-account-delete-bz5tp" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.323588 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement75d6-account-delete-46gbh"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.341303 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-xppc9"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.377468 
4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-xppc9"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.404318 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron40f1-account-delete-rb6h6"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.405585 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron40f1-account-delete-rb6h6" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.429025 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4btb5\" (UniqueName: \"kubernetes.io/projected/33a2ee0e-4d60-46f9-9f2a-a094af634a64-kube-api-access-4btb5\") pod \"cinderac9b-account-delete-lmq7r\" (UID: \"33a2ee0e-4d60-46f9-9f2a-a094af634a64\") " pod="openstack/cinderac9b-account-delete-lmq7r" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.429073 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33a2ee0e-4d60-46f9-9f2a-a094af634a64-operator-scripts\") pod \"cinderac9b-account-delete-lmq7r\" (UID: \"33a2ee0e-4d60-46f9-9f2a-a094af634a64\") " pod="openstack/cinderac9b-account-delete-lmq7r" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.429187 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts\") pod \"placement75d6-account-delete-46gbh\" (UID: \"a794a063-7f9d-4a0f-9cf7-ae70b70769eb\") " pod="openstack/placement75d6-account-delete-46gbh" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.429210 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7667\" (UniqueName: \"kubernetes.io/projected/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-kube-api-access-h7667\") pod \"placement75d6-account-delete-46gbh\" (UID: \"a794a063-7f9d-4a0f-9cf7-ae70b70769eb\") " pod="openstack/placement75d6-account-delete-46gbh" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.432460 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33a2ee0e-4d60-46f9-9f2a-a094af634a64-operator-scripts\") pod \"cinderac9b-account-delete-lmq7r\" (UID: \"33a2ee0e-4d60-46f9-9f2a-a094af634a64\") " pod="openstack/cinderac9b-account-delete-lmq7r" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.441913 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb84b\" (UniqueName: \"kubernetes.io/projected/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-kube-api-access-tb84b\") pod \"barbican6e4d-account-delete-bz5tp\" (UID: \"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf\") " pod="openstack/barbican6e4d-account-delete-bz5tp" Dec 01 07:14:53 crc kubenswrapper[4822]: E1201 07:14:53.457061 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="47987ba4418d28d6c596deb0f84178af4b52de7645b18ebeecc2505db58b4d61" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.462149 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican6e4d-account-delete-bz5tp" Dec 01 07:14:53 crc kubenswrapper[4822]: E1201 07:14:53.474359 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="47987ba4418d28d6c596deb0f84178af4b52de7645b18ebeecc2505db58b4d61" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.474496 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="735cd01e-931e-44b0-968d-ebe114278896" containerName="ovsdbserver-sb" containerID="cri-o://d492a8fc847ccb6927b247e9b2bcc137ee2203221c147de374c232b92714bb3a" gracePeriod=300 Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.474685 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-7wprf"] Dec 01 07:14:53 crc kubenswrapper[4822]: E1201 07:14:53.487191 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="47987ba4418d28d6c596deb0f84178af4b52de7645b18ebeecc2505db58b4d61" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 01 07:14:53 crc kubenswrapper[4822]: E1201 07:14:53.487245 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="13737e91-3ce4-4bcc-a605-d1591596c446" containerName="ovsdbserver-nb" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.510673 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-7wprf"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.531212 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts\") pod \"placement75d6-account-delete-46gbh\" (UID: \"a794a063-7f9d-4a0f-9cf7-ae70b70769eb\") " pod="openstack/placement75d6-account-delete-46gbh" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.531257 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7667\" (UniqueName: \"kubernetes.io/projected/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-kube-api-access-h7667\") pod \"placement75d6-account-delete-46gbh\" (UID: \"a794a063-7f9d-4a0f-9cf7-ae70b70769eb\") " pod="openstack/placement75d6-account-delete-46gbh" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.531290 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8858\" (UniqueName: \"kubernetes.io/projected/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-kube-api-access-k8858\") pod \"neutron40f1-account-delete-rb6h6\" (UID: \"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3\") " pod="openstack/neutron40f1-account-delete-rb6h6" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.531477 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts\") pod \"neutron40f1-account-delete-rb6h6\" (UID: \"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3\") " pod="openstack/neutron40f1-account-delete-rb6h6" Dec 01 07:14:53 crc 
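
The repeated "ExecSync cmd from runtime service failed ... container is stopping" and "Probe errored" entries are readiness probes racing container shutdown: an exec-based probe can no longer register a PID inside a container that is already stopping, so the errors are expected teardown noise. HTTP-probed services sidestep that by failing readiness deliberately once SIGTERM arrives; a minimal sketch follows, where the /readyz path and :8080 port are illustrative choices, not taken from the log.

package main

import (
	"context"
	"net/http"
	"os/signal"
	"sync/atomic"
	"syscall"
)

// A readiness endpoint that flips to 503 as soon as shutdown begins, so
// probes fail cleanly instead of racing the dying process — the benign
// counterpart of the "container is stopping" probe errors above.
func main() {
	var stopping atomic.Bool
	ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM)
	defer stop()

	http.HandleFunc("/readyz", func(w http.ResponseWriter, r *http.Request) {
		if stopping.Load() {
			http.Error(w, "shutting down", http.StatusServiceUnavailable)
			return
		}
		w.WriteHeader(http.StatusOK)
	})

	go func() {
		<-ctx.Done() // SIGTERM received
		stopping.Store(true)
	}()

	_ = http.ListenAndServe(":8080", nil)
}
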
kubenswrapper[4822]: I1201 07:14:53.532413 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts\") pod \"placement75d6-account-delete-46gbh\" (UID: \"a794a063-7f9d-4a0f-9cf7-ae70b70769eb\") " pod="openstack/placement75d6-account-delete-46gbh" Dec 01 07:14:53 crc kubenswrapper[4822]: E1201 07:14:53.534179 4822 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 01 07:14:53 crc kubenswrapper[4822]: E1201 07:14:53.534236 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data podName:a1229c08-35a5-4f16-8334-f32bb9b852b6 nodeName:}" failed. No retries permitted until 2025-12-01 07:14:54.534219201 +0000 UTC m=+1449.855026887 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data") pod "rabbitmq-cell1-server-0" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6") : configmap "rabbitmq-cell1-config-data" not found Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.562748 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron40f1-account-delete-rb6h6"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.572121 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4btb5\" (UniqueName: \"kubernetes.io/projected/33a2ee0e-4d60-46f9-9f2a-a094af634a64-kube-api-access-4btb5\") pod \"cinderac9b-account-delete-lmq7r\" (UID: \"33a2ee0e-4d60-46f9-9f2a-a094af634a64\") " pod="openstack/cinderac9b-account-delete-lmq7r" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.605082 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-mvk28"] Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.629050 4822 util.go:30] "No sandbox for pod can be found. 
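
Note the retry arithmetic in the MountVolume failures: the same config-data volume was first retried after 500ms, here after 1s, and further down after 2s — the per-operation backoff doubles on every consecutive failure. A sketch of that doubling, where the 500ms initial delay matches the log and the cap is an assumed value for illustration (this log section never reaches one).

package main

import (
	"fmt"
	"time"
)

// nextRetryDelay doubles the wait after each consecutive failure, matching
// the 500ms -> 1s -> 2s progression of the rabbitmq config-data retries.
// maxDelay is an assumed ceiling, not taken from the log.
func nextRetryDelay(prev time.Duration) time.Duration {
	const initial = 500 * time.Millisecond
	const maxDelay = 2 * time.Minute // assumption for illustration
	if prev <= 0 {
		return initial
	}
	if next := 2 * prev; next < maxDelay {
		return next
	}
	return maxDelay
}

func main() {
	var d time.Duration
	for i := 0; i < 4; i++ {
		d = nextRetryDelay(d)
		fmt.Println(d) // 500ms, 1s, 2s, 4s
	}
}
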
Need to start a new one" pod="openstack/cinderac9b-account-delete-lmq7r" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.645009 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8858\" (UniqueName: \"kubernetes.io/projected/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-kube-api-access-k8858\") pod \"neutron40f1-account-delete-rb6h6\" (UID: \"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3\") " pod="openstack/neutron40f1-account-delete-rb6h6" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.645284 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts\") pod \"neutron40f1-account-delete-rb6h6\" (UID: \"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3\") " pod="openstack/neutron40f1-account-delete-rb6h6" Dec 01 07:14:53 crc kubenswrapper[4822]: I1201 07:14:53.646221 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts\") pod \"neutron40f1-account-delete-rb6h6\" (UID: \"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3\") " pod="openstack/neutron40f1-account-delete-rb6h6" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:53.694388 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-mvk28"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:53.703166 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7667\" (UniqueName: \"kubernetes.io/projected/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-kube-api-access-h7667\") pod \"placement75d6-account-delete-46gbh\" (UID: \"a794a063-7f9d-4a0f-9cf7-ae70b70769eb\") " pod="openstack/placement75d6-account-delete-46gbh" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:53.768978 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8858\" (UniqueName: \"kubernetes.io/projected/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-kube-api-access-k8858\") pod \"neutron40f1-account-delete-rb6h6\" (UID: \"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3\") " pod="openstack/neutron40f1-account-delete-rb6h6" Dec 01 07:14:54 crc kubenswrapper[4822]: E1201 07:14:53.769332 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:53.785359 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron40f1-account-delete-rb6h6" Dec 01 07:14:54 crc kubenswrapper[4822]: E1201 07:14:53.851256 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 01 07:14:54 crc kubenswrapper[4822]: E1201 07:14:53.877022 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 01 07:14:54 crc kubenswrapper[4822]: E1201 07:14:53.877085 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="fd2326e5-f8a2-47ca-8519-576caa1825c5" containerName="ovn-northd" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:53.921762 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-mhhlz"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:53.963183 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement75d6-account-delete-46gbh" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.005743 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.006180 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-server" containerID="cri-o://4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.006580 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="swift-recon-cron" containerID="cri-o://b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.006653 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-auditor" containerID="cri-o://c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.006660 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-updater" containerID="cri-o://39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.006705 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-replicator" containerID="cri-o://0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.006769 4822 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-server" containerID="cri-o://2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.006805 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-reaper" containerID="cri-o://cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.006837 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="rsync" containerID="cri-o://cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.006849 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-replicator" containerID="cri-o://cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.006840 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-auditor" containerID="cri-o://d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.006919 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-expirer" containerID="cri-o://094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.006995 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-replicator" containerID="cri-o://13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.007033 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-updater" containerID="cri-o://c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.007068 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-auditor" containerID="cri-o://e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.007090 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-server" containerID="cri-o://8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: E1201 07:14:54.172193 4822 configmap.go:193] Couldn't get configMap 
openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 01 07:14:54 crc kubenswrapper[4822]: E1201 07:14:54.172259 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data podName:da2985c5-716e-43ad-b892-ea29d88fa639 nodeName:}" failed. No retries permitted until 2025-12-01 07:14:56.172238279 +0000 UTC m=+1451.493045965 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data") pod "rabbitmq-server-0" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639") : configmap "rabbitmq-config-data" not found Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.269912 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-mhhlz"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.336856 4822 generic.go:334] "Generic (PLEG): container finished" podID="fd2326e5-f8a2-47ca-8519-576caa1825c5" containerID="d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5" exitCode=2 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.336944 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fd2326e5-f8a2-47ca-8519-576caa1825c5","Type":"ContainerDied","Data":"d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5"} Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.364841 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_735cd01e-931e-44b0-968d-ebe114278896/ovsdbserver-sb/0.log" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.364877 4822 generic.go:334] "Generic (PLEG): container finished" podID="735cd01e-931e-44b0-968d-ebe114278896" containerID="fd4e18f76e0e6f5823923b10ab11728a84798dceeb11a8364e726a5f18115231" exitCode=2 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.364890 4822 generic.go:334] "Generic (PLEG): container finished" podID="735cd01e-931e-44b0-968d-ebe114278896" containerID="d492a8fc847ccb6927b247e9b2bcc137ee2203221c147de374c232b92714bb3a" exitCode=143 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.364937 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"735cd01e-931e-44b0-968d-ebe114278896","Type":"ContainerDied","Data":"fd4e18f76e0e6f5823923b10ab11728a84798dceeb11a8364e726a5f18115231"} Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.364962 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"735cd01e-931e-44b0-968d-ebe114278896","Type":"ContainerDied","Data":"d492a8fc847ccb6927b247e9b2bcc137ee2203221c147de374c232b92714bb3a"} Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.371478 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_13737e91-3ce4-4bcc-a605-d1591596c446/ovsdbserver-nb/0.log" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.371517 4822 generic.go:334] "Generic (PLEG): container finished" podID="13737e91-3ce4-4bcc-a605-d1591596c446" containerID="399ace0799f2c11abaf11cd88fb9941adf1ed68dea1ffe69cd7f58d95ad171e4" exitCode=2 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.371543 4822 generic.go:334] "Generic (PLEG): container finished" podID="13737e91-3ce4-4bcc-a605-d1591596c446" containerID="47987ba4418d28d6c596deb0f84178af4b52de7645b18ebeecc2505db58b4d61" exitCode=143 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.371582 4822 
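
The ContainerDied events above use the usual shell exit-code convention: exitCode=143 is 128+15, i.e. the process exited on SIGTERM within its grace period, while exitCode=2 (the openstack-network-exporter containers) is an ordinary application exit status. A small decoder for that convention:

package main

import (
	"fmt"
	"syscall"
)

// signalFromExitCode decodes the 128+N convention seen in the log's
// ContainerDied events: exitCode=143 is 128+15, i.e. killed by SIGTERM.
func signalFromExitCode(code int) (syscall.Signal, bool) {
	if code > 128 && code < 128+64 {
		return syscall.Signal(code - 128), true
	}
	return 0, false // plain application exit status, e.g. exitCode=2
}

func main() {
	if sig, ok := signalFromExitCode(143); ok {
		fmt.Println(sig) // "terminated" (SIGTERM) on Linux
	}
}
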
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"13737e91-3ce4-4bcc-a605-d1591596c446","Type":"ContainerDied","Data":"399ace0799f2c11abaf11cd88fb9941adf1ed68dea1ffe69cd7f58d95ad171e4"} Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.371609 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"13737e91-3ce4-4bcc-a605-d1591596c446","Type":"ContainerDied","Data":"47987ba4418d28d6c596deb0f84178af4b52de7645b18ebeecc2505db58b4d61"} Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.408787 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell04972-account-delete-qmqf7"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.410128 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell04972-account-delete-qmqf7" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.446165 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell04972-account-delete-qmqf7"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.477401 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-q78vl"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.479237 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f05675-879d-4586-af90-8aa6b11ad8a3-operator-scripts\") pod \"novacell04972-account-delete-qmqf7\" (UID: \"a3f05675-879d-4586-af90-8aa6b11ad8a3\") " pod="openstack/novacell04972-account-delete-qmqf7" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.479289 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7p9f\" (UniqueName: \"kubernetes.io/projected/a3f05675-879d-4586-af90-8aa6b11ad8a3-kube-api-access-w7p9f\") pod \"novacell04972-account-delete-qmqf7\" (UID: \"a3f05675-879d-4586-af90-8aa6b11ad8a3\") " pod="openstack/novacell04972-account-delete-qmqf7" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.502204 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-5prf9"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.502515 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapi33d4-account-delete-5vtpn"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.507675 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi33d4-account-delete-5vtpn" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.516960 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-8t992"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.517144 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-8t992" podUID="fa49318e-e2cd-40c4-910c-3e91feae5e73" containerName="openstack-network-exporter" containerID="cri-o://97d660e529a0aaa28a2d2a3858d4da0114b11b557d1f19a98f4c4659e698d302" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.522743 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d8fc4ccc9-zlscm"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.522932 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" podUID="8f969273-1c0e-4ac7-aae1-e04cea6c864f" containerName="dnsmasq-dns" containerID="cri-o://29cb191f67cb3d995ae68c5f80b0794867e6eb356848867309a88a2a42dd2ba1" gracePeriod=10 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.543751 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi33d4-account-delete-5vtpn"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.554944 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.555218 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4508389c-d1b8-4646-902e-4fbb597de2b7" containerName="glance-log" containerID="cri-o://0a586d29377fca2fb0c88e3caf95850b3d5091a08ee0961b98a076104df131df" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.555638 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4508389c-d1b8-4646-902e-4fbb597de2b7" containerName="glance-httpd" containerID="cri-o://b3c842a2cb463bfede65789670bfcdeca18f9089a409fd8da36121671752306c" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.569651 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.571746 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="74508f35-c5cd-4e07-8883-831d2de65f35" containerName="glance-log" containerID="cri-o://8c7dc884cc27a11dfe3c85bef88b9e7c88b3ac50ba6cb9054583b104030b6680" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.572198 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="74508f35-c5cd-4e07-8883-831d2de65f35" containerName="glance-httpd" containerID="cri-o://70b0818800c2616bc33b123c3a4baa1c0af71f6cf5e88867c893507114ae9b3c" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.581216 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts\") pod \"novaapi33d4-account-delete-5vtpn\" (UID: \"b1312168-fba0-46d6-8ca3-346303262924\") " pod="openstack/novaapi33d4-account-delete-5vtpn" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 
07:14:54.581405 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cphjg\" (UniqueName: \"kubernetes.io/projected/b1312168-fba0-46d6-8ca3-346303262924-kube-api-access-cphjg\") pod \"novaapi33d4-account-delete-5vtpn\" (UID: \"b1312168-fba0-46d6-8ca3-346303262924\") " pod="openstack/novaapi33d4-account-delete-5vtpn" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.581920 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f05675-879d-4586-af90-8aa6b11ad8a3-operator-scripts\") pod \"novacell04972-account-delete-qmqf7\" (UID: \"a3f05675-879d-4586-af90-8aa6b11ad8a3\") " pod="openstack/novacell04972-account-delete-qmqf7" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.581986 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7p9f\" (UniqueName: \"kubernetes.io/projected/a3f05675-879d-4586-af90-8aa6b11ad8a3-kube-api-access-w7p9f\") pod \"novacell04972-account-delete-qmqf7\" (UID: \"a3f05675-879d-4586-af90-8aa6b11ad8a3\") " pod="openstack/novacell04972-account-delete-qmqf7" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.583202 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f05675-879d-4586-af90-8aa6b11ad8a3-operator-scripts\") pod \"novacell04972-account-delete-qmqf7\" (UID: \"a3f05675-879d-4586-af90-8aa6b11ad8a3\") " pod="openstack/novacell04972-account-delete-qmqf7" Dec 01 07:14:54 crc kubenswrapper[4822]: E1201 07:14:54.583266 4822 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 01 07:14:54 crc kubenswrapper[4822]: E1201 07:14:54.583309 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data podName:a1229c08-35a5-4f16-8334-f32bb9b852b6 nodeName:}" failed. No retries permitted until 2025-12-01 07:14:56.583293432 +0000 UTC m=+1451.904101118 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data") pod "rabbitmq-cell1-server-0" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6") : configmap "rabbitmq-cell1-config-data" not found Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.583571 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.589597 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-55d788fdd6-vxlcs"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.589870 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-55d788fdd6-vxlcs" podUID="020e76da-9968-4212-a34c-c01c8f8979de" containerName="placement-log" containerID="cri-o://7b63b2bf9e63e64f024e0d88a86fea704c8fb9efcca3eec122913d83b9c7b804" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.590006 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-55d788fdd6-vxlcs" podUID="020e76da-9968-4212-a34c-c01c8f8979de" containerName="placement-api" containerID="cri-o://42a4de3b243b1977444983268e3455770ea130dc481d53f206e1f70bf6eac99c" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.606787 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-685677fbf5-d4ncs"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.607048 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-685677fbf5-d4ncs" podUID="7f721b33-b6df-4e86-90bc-52a7d0c49cbf" containerName="proxy-httpd" containerID="cri-o://34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.607816 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-685677fbf5-d4ncs" podUID="7f721b33-b6df-4e86-90bc-52a7d0c49cbf" containerName="proxy-server" containerID="cri-o://58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.610542 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7p9f\" (UniqueName: \"kubernetes.io/projected/a3f05675-879d-4586-af90-8aa6b11ad8a3-kube-api-access-w7p9f\") pod \"novacell04972-account-delete-qmqf7\" (UID: \"a3f05675-879d-4586-af90-8aa6b11ad8a3\") " pod="openstack/novacell04972-account-delete-qmqf7" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.619479 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-54446c57b5-gw6bc"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.619735 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-54446c57b5-gw6bc" podUID="4e7558a6-6804-48af-b74d-394b7c5dd57e" containerName="neutron-api" containerID="cri-o://995ca5fdebaa312592d184b7f0d85a781106462fc236fe76b19b5c65128c9922" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.619878 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-54446c57b5-gw6bc" podUID="4e7558a6-6804-48af-b74d-394b7c5dd57e" containerName="neutron-httpd" containerID="cri-o://819d59f612f038c2b40fbd5bd6e0ae52aec5cfa454b47bb1de884f3a09c04531" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.631097 4822 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-jv7nr"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.651695 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-jv7nr"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.663423 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-fqb49"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.671196 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-fqb49"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.683170 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cphjg\" (UniqueName: \"kubernetes.io/projected/b1312168-fba0-46d6-8ca3-346303262924-kube-api-access-cphjg\") pod \"novaapi33d4-account-delete-5vtpn\" (UID: \"b1312168-fba0-46d6-8ca3-346303262924\") " pod="openstack/novaapi33d4-account-delete-5vtpn" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.683261 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts\") pod \"novaapi33d4-account-delete-5vtpn\" (UID: \"b1312168-fba0-46d6-8ca3-346303262924\") " pod="openstack/novaapi33d4-account-delete-5vtpn" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.685180 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts\") pod \"novaapi33d4-account-delete-5vtpn\" (UID: \"b1312168-fba0-46d6-8ca3-346303262924\") " pod="openstack/novaapi33d4-account-delete-5vtpn" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.689703 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6bdbb4cd8b-5ttmd"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.690091 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" podUID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" containerName="barbican-api-log" containerID="cri-o://b2656b9a99cb8b9b157e9e99731e4fa691f7956b8db22c14b097df4e6e5d524e" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.690401 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" podUID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" containerName="barbican-api" containerID="cri-o://3e945f46d94da2037046bdb5e78c0bab8aeb085af05bb39debf197f614a96054" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.696776 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-56d5c94499-xwfh7"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.700008 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-56d5c94499-xwfh7" podUID="e2b0531f-a401-46e6-80f0-7f0023e9a0d6" containerName="barbican-worker-log" containerID="cri-o://eac2f0524072a06a976ebc4a00a7e18a6672bbea7169f996d10dfd7fb8ff0ee3" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.700064 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-56d5c94499-xwfh7" podUID="e2b0531f-a401-46e6-80f0-7f0023e9a0d6" containerName="barbican-worker" 
containerID="cri-o://ec6d0903bf7c036cf0d4792cee042afc8037b300b849a72d91c313222cd086a0" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.706463 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.706775 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" podUID="a1df0a4e-4359-436c-9937-e4af9b500ae5" containerName="barbican-keystone-listener-log" containerID="cri-o://0904f4c58b2a13e4a8f5dea770e37c7608b4c6d0e98aeca868ba95e684843122" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.706907 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" podUID="a1df0a4e-4359-436c-9937-e4af9b500ae5" containerName="barbican-keystone-listener" containerID="cri-o://d9ad4dec5b7ced2f8a0abc30fb3b367a52102c69895a9418a23c7124dd5666eb" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.715473 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.731141 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.731347 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" containerName="nova-metadata-log" containerID="cri-o://28abb98621c9dbea0f60ad4699e037dfb98ecedc14b6f3984d6224c5cbeecd9c" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.731471 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" containerName="nova-metadata-metadata" containerID="cri-o://53ee8c277f9abfc79fafeffabdd86dd7daf8f3652232dfbf0acccc1cc46201b6" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.739152 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cphjg\" (UniqueName: \"kubernetes.io/projected/b1312168-fba0-46d6-8ca3-346303262924-kube-api-access-cphjg\") pod \"novaapi33d4-account-delete-5vtpn\" (UID: \"b1312168-fba0-46d6-8ca3-346303262924\") " pod="openstack/novaapi33d4-account-delete-5vtpn" Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.743687 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.743909 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="2ea36c73-7cc2-4da4-a6f9-14d0af7c7210" containerName="nova-scheduler-scheduler" containerID="cri-o://e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.755619 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-2v2wt"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.765487 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="da2985c5-716e-43ad-b892-ea29d88fa639" containerName="rabbitmq" containerID="cri-o://67b4ee72481fb3afcfc3392e80b6461b38dd56f3a4807eaae7eb4e9cb55e7a0c" gracePeriod=604800 Dec 01 07:14:54 crc kubenswrapper[4822]: 
I1201 07:14:54.766168 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-2v2wt"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.794232 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.809765 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.810016 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3076b4c6-b401-48a5-8343-a34d9c979ea3" containerName="nova-api-log" containerID="cri-o://99c5094f6036c81b6e4b694863a462ef98197e4530f7c0d2c8e4bc0a7902f727" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.811214 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3076b4c6-b401-48a5-8343-a34d9c979ea3" containerName="nova-api-api" containerID="cri-o://2737b375be318a62945c13daa6a442680918aa2addf2fb31b4a1be4501d00b4c" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.858910 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-1050-account-create-update-2khv4"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.871733 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-1050-account-create-update-2khv4"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.880624 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.880882 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="03cbadb0-cfc2-4ade-9e82-efd876532994" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f" gracePeriod=30 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.895502 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="a1229c08-35a5-4f16-8334-f32bb9b852b6" containerName="rabbitmq" containerID="cri-o://eb9c3b14412dab696cda88040a38b9cdac23b8d8872cc1f5f086a31ccba67ffc" gracePeriod=604800 Dec 01 07:14:54 crc kubenswrapper[4822]: I1201 07:14:54.939583 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s65nh"] Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.012905 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f992a05-1279-4c84-b09b-977a6b1e4ea8" path="/var/lib/kubelet/pods/3f992a05-1279-4c84-b09b-977a6b1e4ea8/volumes" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.014133 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a55bca0-5d9e-4486-901b-829e001c1b16" path="/var/lib/kubelet/pods/7a55bca0-5d9e-4486-901b-829e001c1b16/volumes" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.014759 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5762ef7-aac0-426f-a391-d50a06868dee" path="/var/lib/kubelet/pods/a5762ef7-aac0-426f-a391-d50a06868dee/volumes" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.041875 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0" path="/var/lib/kubelet/pods/b77a1e8c-bc1c-457c-9a39-8c3bdfc932f0/volumes" Dec 01 07:14:55 crc 
kubenswrapper[4822]: I1201 07:14:55.042889 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be029423-e53b-476b-88ef-99ad1623a8c4" path="/var/lib/kubelet/pods/be029423-e53b-476b-88ef-99ad1623a8c4/volumes" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.043637 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693" path="/var/lib/kubelet/pods/ddee53c1-8f3a-4d49-b0d3-ed23eb9c6693/volumes" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.044466 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb723787-e103-4feb-9aba-df1a78059e4c" path="/var/lib/kubelet/pods/eb723787-e103-4feb-9aba-df1a78059e4c/volumes" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.045532 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f13d0961-136f-4ad6-aa25-23c7e439847a" path="/var/lib/kubelet/pods/f13d0961-136f-4ad6-aa25-23c7e439847a/volumes" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.046384 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f678b4fe-2c75-49a6-a191-7e31d28eea02" path="/var/lib/kubelet/pods/f678b4fe-2c75-49a6-a191-7e31d28eea02/volumes" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.047393 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb9401cf-62a5-407e-8ac0-88d0eecc830d" path="/var/lib/kubelet/pods/fb9401cf-62a5-407e-8ac0-88d0eecc830d/volumes" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.047966 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.048006 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s65nh"] Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.048022 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vgj7h"] Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.048776 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="46169b3f-3e1f-4601-a82e-f3ea1bdde003" containerName="nova-cell1-conductor-conductor" containerID="cri-o://8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066" gracePeriod=30 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.111961 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" containerName="galera" containerID="cri-o://d67bae7c70f17ec833b424c5adfa870275d1a2d4526a3192bf63ea5af23f096a" gracePeriod=30 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.120754 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vgj7h"] Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.159949 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.160204 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="a206e77a-0c4d-49bb-b6d9-c0d18990bd54" containerName="nova-cell0-conductor-conductor" containerID="cri-o://aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c" gracePeriod=30 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.225385 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell04972-account-delete-qmqf7" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.241575 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi33d4-account-delete-5vtpn" Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.328996 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d492a8fc847ccb6927b247e9b2bcc137ee2203221c147de374c232b92714bb3a is running failed: container process not found" containerID="d492a8fc847ccb6927b247e9b2bcc137ee2203221c147de374c232b92714bb3a" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.333309 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d492a8fc847ccb6927b247e9b2bcc137ee2203221c147de374c232b92714bb3a is running failed: container process not found" containerID="d492a8fc847ccb6927b247e9b2bcc137ee2203221c147de374c232b92714bb3a" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.336700 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovs-vswitchd" containerID="cri-o://c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" gracePeriod=30 Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.345687 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d492a8fc847ccb6927b247e9b2bcc137ee2203221c147de374c232b92714bb3a is running failed: container process not found" containerID="d492a8fc847ccb6927b247e9b2bcc137ee2203221c147de374c232b92714bb3a" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.345766 4822 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d492a8fc847ccb6927b247e9b2bcc137ee2203221c147de374c232b92714bb3a is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-sb-0" podUID="735cd01e-931e-44b0-968d-ebe114278896" containerName="ovsdbserver-sb" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.417037 4822 generic.go:334] "Generic (PLEG): container finished" podID="8f969273-1c0e-4ac7-aae1-e04cea6c864f" containerID="29cb191f67cb3d995ae68c5f80b0794867e6eb356848867309a88a2a42dd2ba1" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.417212 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" event={"ID":"8f969273-1c0e-4ac7-aae1-e04cea6c864f","Type":"ContainerDied","Data":"29cb191f67cb3d995ae68c5f80b0794867e6eb356848867309a88a2a42dd2ba1"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.458164 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-8t992_fa49318e-e2cd-40c4-910c-3e91feae5e73/openstack-network-exporter/0.log" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.458227 4822 generic.go:334] "Generic (PLEG): container finished" podID="fa49318e-e2cd-40c4-910c-3e91feae5e73" containerID="97d660e529a0aaa28a2d2a3858d4da0114b11b557d1f19a98f4c4659e698d302" exitCode=2 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.458316 4822 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-8t992" event={"ID":"fa49318e-e2cd-40c4-910c-3e91feae5e73","Type":"ContainerDied","Data":"97d660e529a0aaa28a2d2a3858d4da0114b11b557d1f19a98f4c4659e698d302"} Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.471572 4822 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Dec 01 07:14:55 crc kubenswrapper[4822]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 01 07:14:55 crc kubenswrapper[4822]: + source /usr/local/bin/container-scripts/functions Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNBridge=br-int Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNRemote=tcp:localhost:6642 Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNEncapType=geneve Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNAvailabilityZones= Dec 01 07:14:55 crc kubenswrapper[4822]: ++ EnableChassisAsGateway=true Dec 01 07:14:55 crc kubenswrapper[4822]: ++ PhysicalNetworks= Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNHostName= Dec 01 07:14:55 crc kubenswrapper[4822]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 01 07:14:55 crc kubenswrapper[4822]: ++ ovs_dir=/var/lib/openvswitch Dec 01 07:14:55 crc kubenswrapper[4822]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 01 07:14:55 crc kubenswrapper[4822]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 01 07:14:55 crc kubenswrapper[4822]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 01 07:14:55 crc kubenswrapper[4822]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 01 07:14:55 crc kubenswrapper[4822]: + sleep 0.5 Dec 01 07:14:55 crc kubenswrapper[4822]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 01 07:14:55 crc kubenswrapper[4822]: + cleanup_ovsdb_server_semaphore Dec 01 07:14:55 crc kubenswrapper[4822]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 01 07:14:55 crc kubenswrapper[4822]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 01 07:14:55 crc kubenswrapper[4822]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-5prf9" message=< Dec 01 07:14:55 crc kubenswrapper[4822]: Exiting ovsdb-server (5) [ OK ] Dec 01 07:14:55 crc kubenswrapper[4822]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 01 07:14:55 crc kubenswrapper[4822]: + source /usr/local/bin/container-scripts/functions Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNBridge=br-int Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNRemote=tcp:localhost:6642 Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNEncapType=geneve Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNAvailabilityZones= Dec 01 07:14:55 crc kubenswrapper[4822]: ++ EnableChassisAsGateway=true Dec 01 07:14:55 crc kubenswrapper[4822]: ++ PhysicalNetworks= Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNHostName= Dec 01 07:14:55 crc kubenswrapper[4822]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 01 07:14:55 crc kubenswrapper[4822]: ++ ovs_dir=/var/lib/openvswitch Dec 01 07:14:55 crc kubenswrapper[4822]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 01 07:14:55 crc kubenswrapper[4822]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 01 07:14:55 crc kubenswrapper[4822]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 01 07:14:55 crc kubenswrapper[4822]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 01 07:14:55 crc kubenswrapper[4822]: + sleep 0.5 Dec 01 07:14:55 crc kubenswrapper[4822]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 01 07:14:55 crc kubenswrapper[4822]: + cleanup_ovsdb_server_semaphore Dec 01 07:14:55 crc kubenswrapper[4822]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 01 07:14:55 crc kubenswrapper[4822]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 01 07:14:55 crc kubenswrapper[4822]: > Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.471617 4822 kuberuntime_container.go:691] "PreStop hook failed" err=< Dec 01 07:14:55 crc kubenswrapper[4822]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 01 07:14:55 crc kubenswrapper[4822]: + source /usr/local/bin/container-scripts/functions Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNBridge=br-int Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNRemote=tcp:localhost:6642 Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNEncapType=geneve Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNAvailabilityZones= Dec 01 07:14:55 crc kubenswrapper[4822]: ++ EnableChassisAsGateway=true Dec 01 07:14:55 crc kubenswrapper[4822]: ++ PhysicalNetworks= Dec 01 07:14:55 crc kubenswrapper[4822]: ++ OVNHostName= Dec 01 07:14:55 crc kubenswrapper[4822]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 01 07:14:55 crc kubenswrapper[4822]: ++ ovs_dir=/var/lib/openvswitch Dec 01 07:14:55 crc kubenswrapper[4822]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 01 07:14:55 crc kubenswrapper[4822]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 01 07:14:55 crc kubenswrapper[4822]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 01 07:14:55 crc kubenswrapper[4822]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 01 07:14:55 crc kubenswrapper[4822]: + sleep 0.5 Dec 01 07:14:55 crc kubenswrapper[4822]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 01 07:14:55 crc kubenswrapper[4822]: + cleanup_ovsdb_server_semaphore Dec 01 07:14:55 crc kubenswrapper[4822]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 01 07:14:55 crc kubenswrapper[4822]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 01 07:14:55 crc kubenswrapper[4822]: > pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server" containerID="cri-o://2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.471677 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server" containerID="cri-o://2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" gracePeriod=30 Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.482904 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.482992 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.483168 4822 log.go:32] "ExecSync cmd from runtime service failed" err=< Dec 01 07:14:55 crc kubenswrapper[4822]: rpc error: code = Unknown desc = command error: setns `mnt`: Bad file descriptor Dec 01 07:14:55 crc kubenswrapper[4822]: fail startup Dec 01 07:14:55 crc kubenswrapper[4822]: , stdout: , stderr: , exit code -1 Dec 01 07:14:55 crc kubenswrapper[4822]: > containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.492322 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.492462 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.492722 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5" 
cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.494419 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.494476 4822 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.494852 4822 generic.go:334] "Generic (PLEG): container finished" podID="b725dbb9-8785-43a1-9f35-215938938f6e" containerID="28abb98621c9dbea0f60ad4699e037dfb98ecedc14b6f3984d6224c5cbeecd9c" exitCode=143 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.494961 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b725dbb9-8785-43a1-9f35-215938938f6e","Type":"ContainerDied","Data":"28abb98621c9dbea0f60ad4699e037dfb98ecedc14b6f3984d6224c5cbeecd9c"} Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.500389 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.500450 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="2ea36c73-7cc2-4da4-a6f9-14d0af7c7210" containerName="nova-scheduler-scheduler" Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.507098 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:14:55 crc kubenswrapper[4822]: E1201 07:14:55.507162 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovs-vswitchd" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533555 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533583 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" 
containerID="094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533592 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533600 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533608 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533614 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533619 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533626 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533632 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533638 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533644 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533651 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533657 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533664 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533726 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533752 4822 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533763 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533772 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533781 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533791 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533800 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533809 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533818 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533827 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533836 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533845 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.533853 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 
07:14:55.533862 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.544407 4822 generic.go:334] "Generic (PLEG): container finished" podID="020e76da-9968-4212-a34c-c01c8f8979de" containerID="7b63b2bf9e63e64f024e0d88a86fea704c8fb9efcca3eec122913d83b9c7b804" exitCode=143 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.544475 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55d788fdd6-vxlcs" event={"ID":"020e76da-9968-4212-a34c-c01c8f8979de","Type":"ContainerDied","Data":"7b63b2bf9e63e64f024e0d88a86fea704c8fb9efcca3eec122913d83b9c7b804"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.545161 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_735cd01e-931e-44b0-968d-ebe114278896/ovsdbserver-sb/0.log" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.545214 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.547389 4822 generic.go:334] "Generic (PLEG): container finished" podID="7f721b33-b6df-4e86-90bc-52a7d0c49cbf" containerID="34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.547455 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-685677fbf5-d4ncs" event={"ID":"7f721b33-b6df-4e86-90bc-52a7d0c49cbf","Type":"ContainerDied","Data":"34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.553955 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_13737e91-3ce4-4bcc-a605-d1591596c446/ovsdbserver-nb/0.log" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.554039 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.560200 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.563228 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-8t992_fa49318e-e2cd-40c4-910c-3e91feae5e73/openstack-network-exporter/0.log" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.563302 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-8t992" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.571417 4822 generic.go:334] "Generic (PLEG): container finished" podID="3076b4c6-b401-48a5-8343-a34d9c979ea3" containerID="99c5094f6036c81b6e4b694863a462ef98197e4530f7c0d2c8e4bc0a7902f727" exitCode=143 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.571506 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3076b4c6-b401-48a5-8343-a34d9c979ea3","Type":"ContainerDied","Data":"99c5094f6036c81b6e4b694863a462ef98197e4530f7c0d2c8e4bc0a7902f727"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.575745 4822 generic.go:334] "Generic (PLEG): container finished" podID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" containerID="b2656b9a99cb8b9b157e9e99731e4fa691f7956b8db22c14b097df4e6e5d524e" exitCode=143 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.575808 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" event={"ID":"4f5cf9f6-a48b-455b-aef3-952697eb1a09","Type":"ContainerDied","Data":"b2656b9a99cb8b9b157e9e99731e4fa691f7956b8db22c14b097df4e6e5d524e"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.584077 4822 generic.go:334] "Generic (PLEG): container finished" podID="946e1406-8dc2-445d-9d5a-f801260d554b" containerID="619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.584141 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"946e1406-8dc2-445d-9d5a-f801260d554b","Type":"ContainerDied","Data":"619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.592850 4822 generic.go:334] "Generic (PLEG): container finished" podID="4e7558a6-6804-48af-b74d-394b7c5dd57e" containerID="819d59f612f038c2b40fbd5bd6e0ae52aec5cfa454b47bb1de884f3a09c04531" exitCode=0 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.592975 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54446c57b5-gw6bc" event={"ID":"4e7558a6-6804-48af-b74d-394b7c5dd57e","Type":"ContainerDied","Data":"819d59f612f038c2b40fbd5bd6e0ae52aec5cfa454b47bb1de884f3a09c04531"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.594422 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.596895 4822 generic.go:334] "Generic (PLEG): container finished" podID="e2b0531f-a401-46e6-80f0-7f0023e9a0d6" containerID="eac2f0524072a06a976ebc4a00a7e18a6672bbea7169f996d10dfd7fb8ff0ee3" exitCode=143 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.596963 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-56d5c94499-xwfh7" event={"ID":"e2b0531f-a401-46e6-80f0-7f0023e9a0d6","Type":"ContainerDied","Data":"eac2f0524072a06a976ebc4a00a7e18a6672bbea7169f996d10dfd7fb8ff0ee3"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.602518 4822 generic.go:334] "Generic (PLEG): container finished" podID="a1df0a4e-4359-436c-9937-e4af9b500ae5" containerID="0904f4c58b2a13e4a8f5dea770e37c7608b4c6d0e98aeca868ba95e684843122" exitCode=143 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.603403 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" event={"ID":"a1df0a4e-4359-436c-9937-e4af9b500ae5","Type":"ContainerDied","Data":"0904f4c58b2a13e4a8f5dea770e37c7608b4c6d0e98aeca868ba95e684843122"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607342 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-openstack-config\") pod \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607388 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"735cd01e-931e-44b0-968d-ebe114278896\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607438 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wt5v5\" (UniqueName: \"kubernetes.io/projected/13737e91-3ce4-4bcc-a605-d1591596c446-kube-api-access-wt5v5\") pod \"13737e91-3ce4-4bcc-a605-d1591596c446\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607454 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/fa49318e-e2cd-40c4-910c-3e91feae5e73-ovn-rundir\") pod \"fa49318e-e2cd-40c4-910c-3e91feae5e73\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607477 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-ovsdbserver-sb-tls-certs\") pod \"735cd01e-931e-44b0-968d-ebe114278896\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607496 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-metrics-certs-tls-certs\") pod \"735cd01e-931e-44b0-968d-ebe114278896\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607524 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/fa49318e-e2cd-40c4-910c-3e91feae5e73-combined-ca-bundle\") pod \"fa49318e-e2cd-40c4-910c-3e91feae5e73\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607576 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-openstack-config-secret\") pod \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607598 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/735cd01e-931e-44b0-968d-ebe114278896-ovsdb-rundir\") pod \"735cd01e-931e-44b0-968d-ebe114278896\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607619 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-metrics-certs-tls-certs\") pod \"13737e91-3ce4-4bcc-a605-d1591596c446\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607643 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"13737e91-3ce4-4bcc-a605-d1591596c446\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607706 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/13737e91-3ce4-4bcc-a605-d1591596c446-ovsdb-rundir\") pod \"13737e91-3ce4-4bcc-a605-d1591596c446\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607743 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/fa49318e-e2cd-40c4-910c-3e91feae5e73-ovs-rundir\") pod \"fa49318e-e2cd-40c4-910c-3e91feae5e73\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607768 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdsp4\" (UniqueName: \"kubernetes.io/projected/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-kube-api-access-wdsp4\") pod \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607795 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qbwf\" (UniqueName: \"kubernetes.io/projected/735cd01e-931e-44b0-968d-ebe114278896-kube-api-access-5qbwf\") pod \"735cd01e-931e-44b0-968d-ebe114278896\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607824 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-combined-ca-bundle\") pod \"735cd01e-931e-44b0-968d-ebe114278896\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607855 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa49318e-e2cd-40c4-910c-3e91feae5e73-metrics-certs-tls-certs\") pod \"fa49318e-e2cd-40c4-910c-3e91feae5e73\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.607889 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-combined-ca-bundle\") pod \"13737e91-3ce4-4bcc-a605-d1591596c446\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.609037 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa49318e-e2cd-40c4-910c-3e91feae5e73-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "fa49318e-e2cd-40c4-910c-3e91feae5e73" (UID: "fa49318e-e2cd-40c4-910c-3e91feae5e73"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.609079 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa49318e-e2cd-40c4-910c-3e91feae5e73-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "fa49318e-e2cd-40c4-910c-3e91feae5e73" (UID: "fa49318e-e2cd-40c4-910c-3e91feae5e73"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.609536 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/735cd01e-931e-44b0-968d-ebe114278896-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "735cd01e-931e-44b0-968d-ebe114278896" (UID: "735cd01e-931e-44b0-968d-ebe114278896"). InnerVolumeSpecName "ovsdb-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.609780 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/735cd01e-931e-44b0-968d-ebe114278896-config\") pod \"735cd01e-931e-44b0-968d-ebe114278896\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.609829 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-ovsdbserver-nb-tls-certs\") pod \"13737e91-3ce4-4bcc-a605-d1591596c446\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.609851 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-combined-ca-bundle\") pod \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\" (UID: \"92fc89df-ebe6-45e8-b0f3-4a4d20476ee4\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.609887 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13737e91-3ce4-4bcc-a605-d1591596c446-config\") pod \"13737e91-3ce4-4bcc-a605-d1591596c446\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.609948 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/735cd01e-931e-44b0-968d-ebe114278896-scripts\") pod \"735cd01e-931e-44b0-968d-ebe114278896\" (UID: \"735cd01e-931e-44b0-968d-ebe114278896\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.609991 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvhb2\" (UniqueName: \"kubernetes.io/projected/fa49318e-e2cd-40c4-910c-3e91feae5e73-kube-api-access-cvhb2\") pod \"fa49318e-e2cd-40c4-910c-3e91feae5e73\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.610029 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13737e91-3ce4-4bcc-a605-d1591596c446-scripts\") pod \"13737e91-3ce4-4bcc-a605-d1591596c446\" (UID: \"13737e91-3ce4-4bcc-a605-d1591596c446\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.610071 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa49318e-e2cd-40c4-910c-3e91feae5e73-config\") pod \"fa49318e-e2cd-40c4-910c-3e91feae5e73\" (UID: \"fa49318e-e2cd-40c4-910c-3e91feae5e73\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.611110 4822 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/fa49318e-e2cd-40c4-910c-3e91feae5e73-ovs-rundir\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.611131 4822 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/fa49318e-e2cd-40c4-910c-3e91feae5e73-ovn-rundir\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.611140 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/735cd01e-931e-44b0-968d-ebe114278896-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.611143 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13737e91-3ce4-4bcc-a605-d1591596c446-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "13737e91-3ce4-4bcc-a605-d1591596c446" (UID: "13737e91-3ce4-4bcc-a605-d1591596c446"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.612462 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa49318e-e2cd-40c4-910c-3e91feae5e73-config" (OuterVolumeSpecName: "config") pod "fa49318e-e2cd-40c4-910c-3e91feae5e73" (UID: "fa49318e-e2cd-40c4-910c-3e91feae5e73"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.617043 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/735cd01e-931e-44b0-968d-ebe114278896-kube-api-access-5qbwf" (OuterVolumeSpecName: "kube-api-access-5qbwf") pod "735cd01e-931e-44b0-968d-ebe114278896" (UID: "735cd01e-931e-44b0-968d-ebe114278896"). InnerVolumeSpecName "kube-api-access-5qbwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.617658 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "13737e91-3ce4-4bcc-a605-d1591596c446" (UID: "13737e91-3ce4-4bcc-a605-d1591596c446"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.620880 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "735cd01e-931e-44b0-968d-ebe114278896" (UID: "735cd01e-931e-44b0-968d-ebe114278896"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.633618 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/735cd01e-931e-44b0-968d-ebe114278896-config" (OuterVolumeSpecName: "config") pod "735cd01e-931e-44b0-968d-ebe114278896" (UID: "735cd01e-931e-44b0-968d-ebe114278896"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.634001 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13737e91-3ce4-4bcc-a605-d1591596c446-config" (OuterVolumeSpecName: "config") pod "13737e91-3ce4-4bcc-a605-d1591596c446" (UID: "13737e91-3ce4-4bcc-a605-d1591596c446"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.634339 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/735cd01e-931e-44b0-968d-ebe114278896-scripts" (OuterVolumeSpecName: "scripts") pod "735cd01e-931e-44b0-968d-ebe114278896" (UID: "735cd01e-931e-44b0-968d-ebe114278896"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.634437 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13737e91-3ce4-4bcc-a605-d1591596c446-scripts" (OuterVolumeSpecName: "scripts") pod "13737e91-3ce4-4bcc-a605-d1591596c446" (UID: "13737e91-3ce4-4bcc-a605-d1591596c446"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.639608 4822 generic.go:334] "Generic (PLEG): container finished" podID="74508f35-c5cd-4e07-8883-831d2de65f35" containerID="8c7dc884cc27a11dfe3c85bef88b9e7c88b3ac50ba6cb9054583b104030b6680" exitCode=143 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.639802 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"74508f35-c5cd-4e07-8883-831d2de65f35","Type":"ContainerDied","Data":"8c7dc884cc27a11dfe3c85bef88b9e7c88b3ac50ba6cb9054583b104030b6680"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.645770 4822 generic.go:334] "Generic (PLEG): container finished" podID="92fc89df-ebe6-45e8-b0f3-4a4d20476ee4" containerID="e048e44ad53b2aa10bcb223a22d1436e3fc429ff83707e605d3e5616b88398a4" exitCode=137 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.645856 4822 scope.go:117] "RemoveContainer" containerID="e048e44ad53b2aa10bcb223a22d1436e3fc429ff83707e605d3e5616b88398a4" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.646304 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.646807 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa49318e-e2cd-40c4-910c-3e91feae5e73-kube-api-access-cvhb2" (OuterVolumeSpecName: "kube-api-access-cvhb2") pod "fa49318e-e2cd-40c4-910c-3e91feae5e73" (UID: "fa49318e-e2cd-40c4-910c-3e91feae5e73"). InnerVolumeSpecName "kube-api-access-cvhb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.646915 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13737e91-3ce4-4bcc-a605-d1591596c446-kube-api-access-wt5v5" (OuterVolumeSpecName: "kube-api-access-wt5v5") pod "13737e91-3ce4-4bcc-a605-d1591596c446" (UID: "13737e91-3ce4-4bcc-a605-d1591596c446"). InnerVolumeSpecName "kube-api-access-wt5v5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.650179 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-kube-api-access-wdsp4" (OuterVolumeSpecName: "kube-api-access-wdsp4") pod "92fc89df-ebe6-45e8-b0f3-4a4d20476ee4" (UID: "92fc89df-ebe6-45e8-b0f3-4a4d20476ee4"). InnerVolumeSpecName "kube-api-access-wdsp4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.650366 4822 generic.go:334] "Generic (PLEG): container finished" podID="4508389c-d1b8-4646-902e-4fbb597de2b7" containerID="0a586d29377fca2fb0c88e3caf95850b3d5091a08ee0961b98a076104df131df" exitCode=143 Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.650409 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4508389c-d1b8-4646-902e-4fbb597de2b7","Type":"ContainerDied","Data":"0a586d29377fca2fb0c88e3caf95850b3d5091a08ee0961b98a076104df131df"} Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.716248 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-dns-swift-storage-0\") pod \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.716352 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-config\") pod \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.716411 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rg8g8\" (UniqueName: \"kubernetes.io/projected/8f969273-1c0e-4ac7-aae1-e04cea6c864f-kube-api-access-rg8g8\") pod \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.716596 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-ovsdbserver-sb\") pod \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.716641 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-ovsdbserver-nb\") pod \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.716663 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-dns-svc\") pod \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\" (UID: \"8f969273-1c0e-4ac7-aae1-e04cea6c864f\") " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.717051 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.717065 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/13737e91-3ce4-4bcc-a605-d1591596c446-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.717075 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdsp4\" (UniqueName: \"kubernetes.io/projected/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-kube-api-access-wdsp4\") on node \"crc\" DevicePath \"\"" Dec 01 
07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.717084 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qbwf\" (UniqueName: \"kubernetes.io/projected/735cd01e-931e-44b0-968d-ebe114278896-kube-api-access-5qbwf\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.717094 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/735cd01e-931e-44b0-968d-ebe114278896-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.717102 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13737e91-3ce4-4bcc-a605-d1591596c446-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.717112 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/735cd01e-931e-44b0-968d-ebe114278896-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.717121 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvhb2\" (UniqueName: \"kubernetes.io/projected/fa49318e-e2cd-40c4-910c-3e91feae5e73-kube-api-access-cvhb2\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.717130 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13737e91-3ce4-4bcc-a605-d1591596c446-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.717139 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa49318e-e2cd-40c4-910c-3e91feae5e73-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.717151 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.717159 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wt5v5\" (UniqueName: \"kubernetes.io/projected/13737e91-3ce4-4bcc-a605-d1591596c446-kube-api-access-wt5v5\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.718014 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92fc89df-ebe6-45e8-b0f3-4a4d20476ee4" (UID: "92fc89df-ebe6-45e8-b0f3-4a4d20476ee4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.725265 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "735cd01e-931e-44b0-968d-ebe114278896" (UID: "735cd01e-931e-44b0-968d-ebe114278896"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.733687 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13737e91-3ce4-4bcc-a605-d1591596c446" (UID: "13737e91-3ce4-4bcc-a605-d1591596c446"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.753966 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f969273-1c0e-4ac7-aae1-e04cea6c864f-kube-api-access-rg8g8" (OuterVolumeSpecName: "kube-api-access-rg8g8") pod "8f969273-1c0e-4ac7-aae1-e04cea6c864f" (UID: "8f969273-1c0e-4ac7-aae1-e04cea6c864f"). InnerVolumeSpecName "kube-api-access-rg8g8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.821139 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.821184 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.821196 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.821207 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rg8g8\" (UniqueName: \"kubernetes.io/projected/8f969273-1c0e-4ac7-aae1-e04cea6c864f-kube-api-access-rg8g8\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.838445 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa49318e-e2cd-40c4-910c-3e91feae5e73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa49318e-e2cd-40c4-910c-3e91feae5e73" (UID: "fa49318e-e2cd-40c4-910c-3e91feae5e73"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:55 crc kubenswrapper[4822]: I1201 07:14:55.929968 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa49318e-e2cd-40c4-910c-3e91feae5e73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.002154 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance8845-account-delete-rb8lw"] Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.012221 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "92fc89df-ebe6-45e8-b0f3-4a4d20476ee4" (UID: "92fc89df-ebe6-45e8-b0f3-4a4d20476ee4"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.037346 4822 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.056075 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.141219 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.181814 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 01 07:14:56 crc kubenswrapper[4822]: E1201 07:14:56.185796 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 01 07:14:56 crc kubenswrapper[4822]: E1201 07:14:56.190716 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 01 07:14:56 crc kubenswrapper[4822]: E1201 07:14:56.192763 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 01 07:14:56 crc kubenswrapper[4822]: E1201 07:14:56.192818 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="a206e77a-0c4d-49bb-b6d9-c0d18990bd54" containerName="nova-cell0-conductor-conductor" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.196235 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-config" (OuterVolumeSpecName: "config") pod "8f969273-1c0e-4ac7-aae1-e04cea6c864f" (UID: "8f969273-1c0e-4ac7-aae1-e04cea6c864f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.220265 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8f969273-1c0e-4ac7-aae1-e04cea6c864f" (UID: "8f969273-1c0e-4ac7-aae1-e04cea6c864f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.244801 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.244841 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.244861 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.244870 4822 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: E1201 07:14:56.244927 4822 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 01 07:14:56 crc kubenswrapper[4822]: E1201 07:14:56.244967 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data podName:da2985c5-716e-43ad-b892-ea29d88fa639 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:00.244954089 +0000 UTC m=+1455.565761775 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data") pod "rabbitmq-server-0" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639") : configmap "rabbitmq-config-data" not found Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.251140 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "13737e91-3ce4-4bcc-a605-d1591596c446" (UID: "13737e91-3ce4-4bcc-a605-d1591596c446"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.285725 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "735cd01e-931e-44b0-968d-ebe114278896" (UID: "735cd01e-931e-44b0-968d-ebe114278896"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.298763 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "92fc89df-ebe6-45e8-b0f3-4a4d20476ee4" (UID: "92fc89df-ebe6-45e8-b0f3-4a4d20476ee4"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.329805 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa49318e-e2cd-40c4-910c-3e91feae5e73-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "fa49318e-e2cd-40c4-910c-3e91feae5e73" (UID: "fa49318e-e2cd-40c4-910c-3e91feae5e73"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.344385 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8f969273-1c0e-4ac7-aae1-e04cea6c864f" (UID: "8f969273-1c0e-4ac7-aae1-e04cea6c864f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.346820 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-vencrypt-tls-certs\") pod \"03cbadb0-cfc2-4ade-9e82-efd876532994\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.346989 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf4zb\" (UniqueName: \"kubernetes.io/projected/03cbadb0-cfc2-4ade-9e82-efd876532994-kube-api-access-bf4zb\") pod \"03cbadb0-cfc2-4ade-9e82-efd876532994\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.347030 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-nova-novncproxy-tls-certs\") pod \"03cbadb0-cfc2-4ade-9e82-efd876532994\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.347100 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-combined-ca-bundle\") pod \"03cbadb0-cfc2-4ade-9e82-efd876532994\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.347169 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-config-data\") pod \"03cbadb0-cfc2-4ade-9e82-efd876532994\" (UID: \"03cbadb0-cfc2-4ade-9e82-efd876532994\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.347614 4822 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.347631 4822 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.347640 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.347648 4822 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa49318e-e2cd-40c4-910c-3e91feae5e73-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.347657 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.358798 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03cbadb0-cfc2-4ade-9e82-efd876532994-kube-api-access-bf4zb" (OuterVolumeSpecName: "kube-api-access-bf4zb") pod "03cbadb0-cfc2-4ade-9e82-efd876532994" (UID: "03cbadb0-cfc2-4ade-9e82-efd876532994"). InnerVolumeSpecName "kube-api-access-bf4zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.383260 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8f969273-1c0e-4ac7-aae1-e04cea6c864f" (UID: "8f969273-1c0e-4ac7-aae1-e04cea6c864f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.396156 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8f969273-1c0e-4ac7-aae1-e04cea6c864f" (UID: "8f969273-1c0e-4ac7-aae1-e04cea6c864f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.398701 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "13737e91-3ce4-4bcc-a605-d1591596c446" (UID: "13737e91-3ce4-4bcc-a605-d1591596c446"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.431370 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinderac9b-account-delete-lmq7r"] Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.448141 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "03cbadb0-cfc2-4ade-9e82-efd876532994" (UID: "03cbadb0-cfc2-4ade-9e82-efd876532994"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.449521 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.449539 4822 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/13737e91-3ce4-4bcc-a605-d1591596c446-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.449562 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.449571 4822 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8f969273-1c0e-4ac7-aae1-e04cea6c864f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.449580 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf4zb\" (UniqueName: \"kubernetes.io/projected/03cbadb0-cfc2-4ade-9e82-efd876532994-kube-api-access-bf4zb\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.457294 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican6e4d-account-delete-bz5tp"] Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.477781 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "735cd01e-931e-44b0-968d-ebe114278896" (UID: "735cd01e-931e-44b0-968d-ebe114278896"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.480442 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron40f1-account-delete-rb6h6"] Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.488753 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-config-data" (OuterVolumeSpecName: "config-data") pod "03cbadb0-cfc2-4ade-9e82-efd876532994" (UID: "03cbadb0-cfc2-4ade-9e82-efd876532994"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.505998 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "03cbadb0-cfc2-4ade-9e82-efd876532994" (UID: "03cbadb0-cfc2-4ade-9e82-efd876532994"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.511816 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.514757 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement75d6-account-delete-46gbh"] Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.518692 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "03cbadb0-cfc2-4ade-9e82-efd876532994" (UID: "03cbadb0-cfc2-4ade-9e82-efd876532994"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.529764 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.557259 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmrgv\" (UniqueName: \"kubernetes.io/projected/946e1406-8dc2-445d-9d5a-f801260d554b-kube-api-access-vmrgv\") pod \"946e1406-8dc2-445d-9d5a-f801260d554b\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.557316 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-config-data\") pod \"946e1406-8dc2-445d-9d5a-f801260d554b\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.557385 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-scripts\") pod \"946e1406-8dc2-445d-9d5a-f801260d554b\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.557413 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-combined-ca-bundle\") pod \"946e1406-8dc2-445d-9d5a-f801260d554b\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.557534 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/946e1406-8dc2-445d-9d5a-f801260d554b-etc-machine-id\") pod \"946e1406-8dc2-445d-9d5a-f801260d554b\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.557579 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-config-data-custom\") pod \"946e1406-8dc2-445d-9d5a-f801260d554b\" (UID: \"946e1406-8dc2-445d-9d5a-f801260d554b\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.558071 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.558097 4822 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 
07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.558112 4822 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/03cbadb0-cfc2-4ade-9e82-efd876532994-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.558125 4822 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/735cd01e-931e-44b0-968d-ebe114278896-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.561765 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "946e1406-8dc2-445d-9d5a-f801260d554b" (UID: "946e1406-8dc2-445d-9d5a-f801260d554b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.561816 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/946e1406-8dc2-445d-9d5a-f801260d554b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "946e1406-8dc2-445d-9d5a-f801260d554b" (UID: "946e1406-8dc2-445d-9d5a-f801260d554b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.562228 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/946e1406-8dc2-445d-9d5a-f801260d554b-kube-api-access-vmrgv" (OuterVolumeSpecName: "kube-api-access-vmrgv") pod "946e1406-8dc2-445d-9d5a-f801260d554b" (UID: "946e1406-8dc2-445d-9d5a-f801260d554b"). InnerVolumeSpecName "kube-api-access-vmrgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.565944 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-scripts" (OuterVolumeSpecName: "scripts") pod "946e1406-8dc2-445d-9d5a-f801260d554b" (UID: "946e1406-8dc2-445d-9d5a-f801260d554b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.659158 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-etc-swift\") pod \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.659225 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-log-httpd\") pod \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.659253 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-run-httpd\") pod \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.659308 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-config-data\") pod \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.671354 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7f721b33-b6df-4e86-90bc-52a7d0c49cbf" (UID: "7f721b33-b6df-4e86-90bc-52a7d0c49cbf"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.673734 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7f721b33-b6df-4e86-90bc-52a7d0c49cbf" (UID: "7f721b33-b6df-4e86-90bc-52a7d0c49cbf"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.673790 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "7f721b33-b6df-4e86-90bc-52a7d0c49cbf" (UID: "7f721b33-b6df-4e86-90bc-52a7d0c49cbf"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.659374 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-combined-ca-bundle\") pod \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.677241 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bjnx\" (UniqueName: \"kubernetes.io/projected/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-kube-api-access-7bjnx\") pod \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.677307 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-internal-tls-certs\") pod \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.677364 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-public-tls-certs\") pod \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\" (UID: \"7f721b33-b6df-4e86-90bc-52a7d0c49cbf\") " Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.678196 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmrgv\" (UniqueName: \"kubernetes.io/projected/946e1406-8dc2-445d-9d5a-f801260d554b-kube-api-access-vmrgv\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.678220 4822 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.678235 4822 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.678247 4822 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.678257 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.678270 4822 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/946e1406-8dc2-445d-9d5a-f801260d554b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.678281 4822 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: E1201 07:14:56.678351 4822 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 01 
07:14:56 crc kubenswrapper[4822]: E1201 07:14:56.687807 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data podName:a1229c08-35a5-4f16-8334-f32bb9b852b6 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:00.687779602 +0000 UTC m=+1456.008587278 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data") pod "rabbitmq-cell1-server-0" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6") : configmap "rabbitmq-cell1-config-data" not found Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.699104 4822 generic.go:334] "Generic (PLEG): container finished" podID="e2b0531f-a401-46e6-80f0-7f0023e9a0d6" containerID="ec6d0903bf7c036cf0d4792cee042afc8037b300b849a72d91c313222cd086a0" exitCode=0 Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.699166 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-56d5c94499-xwfh7" event={"ID":"e2b0531f-a401-46e6-80f0-7f0023e9a0d6","Type":"ContainerDied","Data":"ec6d0903bf7c036cf0d4792cee042afc8037b300b849a72d91c313222cd086a0"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.701323 4822 generic.go:334] "Generic (PLEG): container finished" podID="50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" containerID="d67bae7c70f17ec833b424c5adfa870275d1a2d4526a3192bf63ea5af23f096a" exitCode=0 Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.701362 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a","Type":"ContainerDied","Data":"d67bae7c70f17ec833b424c5adfa870275d1a2d4526a3192bf63ea5af23f096a"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.702968 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-8t992_fa49318e-e2cd-40c4-910c-3e91feae5e73/openstack-network-exporter/0.log" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.703035 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-8t992" event={"ID":"fa49318e-e2cd-40c4-910c-3e91feae5e73","Type":"ContainerDied","Data":"a42b6c757cf819d4fbef0bd7a4a60def699ba518c97e8564d8133d6a20609e7c"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.703081 4822 scope.go:117] "RemoveContainer" containerID="97d660e529a0aaa28a2d2a3858d4da0114b11b557d1f19a98f4c4659e698d302" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.703078 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-8t992" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.705043 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_735cd01e-931e-44b0-968d-ebe114278896/ovsdbserver-sb/0.log" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.705093 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"735cd01e-931e-44b0-968d-ebe114278896","Type":"ContainerDied","Data":"ad310feb175d6d9c6ee5fbc0fc3d797cb49078408c0079fa88833b54376865d1"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.705150 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.725466 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement75d6-account-delete-46gbh" event={"ID":"a794a063-7f9d-4a0f-9cf7-ae70b70769eb","Type":"ContainerStarted","Data":"e8234b092a243bd2f096eec12f5321c7dc8225c8e2ac28de49c5bdb6228d1edb"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.727118 4822 generic.go:334] "Generic (PLEG): container finished" podID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" exitCode=0 Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.727172 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5prf9" event={"ID":"27be7b1c-254b-4dd5-8889-1373d3281e64","Type":"ContainerDied","Data":"2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.738186 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-kube-api-access-7bjnx" (OuterVolumeSpecName: "kube-api-access-7bjnx") pod "7f721b33-b6df-4e86-90bc-52a7d0c49cbf" (UID: "7f721b33-b6df-4e86-90bc-52a7d0c49cbf"). InnerVolumeSpecName "kube-api-access-7bjnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.742114 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6e4d-account-delete-bz5tp" event={"ID":"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf","Type":"ContainerStarted","Data":"050253d8054570e847cc50cf7a63c84bd44c541a39888e0f822ac53cedbed3f6"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.746684 4822 generic.go:334] "Generic (PLEG): container finished" podID="a1df0a4e-4359-436c-9937-e4af9b500ae5" containerID="d9ad4dec5b7ced2f8a0abc30fb3b367a52102c69895a9418a23c7124dd5666eb" exitCode=0 Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.746749 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" event={"ID":"a1df0a4e-4359-436c-9937-e4af9b500ae5","Type":"ContainerDied","Data":"d9ad4dec5b7ced2f8a0abc30fb3b367a52102c69895a9418a23c7124dd5666eb"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.749323 4822 generic.go:334] "Generic (PLEG): container finished" podID="946e1406-8dc2-445d-9d5a-f801260d554b" containerID="adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060" exitCode=0 Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.749372 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"946e1406-8dc2-445d-9d5a-f801260d554b","Type":"ContainerDied","Data":"adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.749394 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"946e1406-8dc2-445d-9d5a-f801260d554b","Type":"ContainerDied","Data":"c6e2045f220f67ded2ef2526eb5154e5827a7a90202dabf76ff92719b4d10d5f"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.749450 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.752636 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderac9b-account-delete-lmq7r" event={"ID":"33a2ee0e-4d60-46f9-9f2a-a094af634a64","Type":"ContainerStarted","Data":"d88670111ab794874def4938e40068ce98af57a7651c75d59f3a6419788471df"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.767413 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance8845-account-delete-rb8lw" event={"ID":"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0","Type":"ContainerStarted","Data":"51b8d67ad80c132d1af12a855561278f56330bd6e03769d6b5fdc4c66129804f"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.767457 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance8845-account-delete-rb8lw" event={"ID":"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0","Type":"ContainerStarted","Data":"7a30677c2bd6d59234c989587987e01e381e20c786abdfb4c209036795866348"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.771649 4822 generic.go:334] "Generic (PLEG): container finished" podID="7f721b33-b6df-4e86-90bc-52a7d0c49cbf" containerID="58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60" exitCode=0 Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.771698 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-685677fbf5-d4ncs" event={"ID":"7f721b33-b6df-4e86-90bc-52a7d0c49cbf","Type":"ContainerDied","Data":"58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.771718 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-685677fbf5-d4ncs" event={"ID":"7f721b33-b6df-4e86-90bc-52a7d0c49cbf","Type":"ContainerDied","Data":"36e01835fa026f3ef7ff9590d2d93686fbe84870121b26ee7c609a74ed071510"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.771773 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-685677fbf5-d4ncs" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.779805 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bjnx\" (UniqueName: \"kubernetes.io/projected/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-kube-api-access-7bjnx\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.782969 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron40f1-account-delete-rb6h6" event={"ID":"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3","Type":"ContainerStarted","Data":"c6814bb8297ec8d8dd45a1928c26602ef62d476aad12a763d0c989b1043fb60b"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.784598 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance8845-account-delete-rb8lw" podStartSLOduration=4.784578753 podStartE2EDuration="4.784578753s" podCreationTimestamp="2025-12-01 07:14:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:14:56.780540031 +0000 UTC m=+1452.101347737" watchObservedRunningTime="2025-12-01 07:14:56.784578753 +0000 UTC m=+1452.105386439" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.787989 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_13737e91-3ce4-4bcc-a605-d1591596c446/ovsdbserver-nb/0.log" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.788049 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"13737e91-3ce4-4bcc-a605-d1591596c446","Type":"ContainerDied","Data":"536960d21bed17fa788b37641e7e72c8b8bf72204717961977574d0a30be0e39"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.788122 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.793969 4822 generic.go:334] "Generic (PLEG): container finished" podID="03cbadb0-cfc2-4ade-9e82-efd876532994" containerID="e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f" exitCode=0 Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.794028 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"03cbadb0-cfc2-4ade-9e82-efd876532994","Type":"ContainerDied","Data":"e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.794053 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"03cbadb0-cfc2-4ade-9e82-efd876532994","Type":"ContainerDied","Data":"0c58259b1ab28e6448f9db53319a670cfb6a1020523336eee853402052d9bb89"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.794098 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.796862 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" event={"ID":"8f969273-1c0e-4ac7-aae1-e04cea6c864f","Type":"ContainerDied","Data":"33f99ab0ecce4d1a9d3c67881a8de41dcec841780ac802e261f6e967d6e2800a"} Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.796923 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d8fc4ccc9-zlscm" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.861848 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "946e1406-8dc2-445d-9d5a-f801260d554b" (UID: "946e1406-8dc2-445d-9d5a-f801260d554b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.875634 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-config-data" (OuterVolumeSpecName: "config-data") pod "946e1406-8dc2-445d-9d5a-f801260d554b" (UID: "946e1406-8dc2-445d-9d5a-f801260d554b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.881301 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.881327 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/946e1406-8dc2-445d-9d5a-f801260d554b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.929007 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-config-data" (OuterVolumeSpecName: "config-data") pod "7f721b33-b6df-4e86-90bc-52a7d0c49cbf" (UID: "7f721b33-b6df-4e86-90bc-52a7d0c49cbf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.942945 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell04972-account-delete-qmqf7"] Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.967456 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f721b33-b6df-4e86-90bc-52a7d0c49cbf" (UID: "7f721b33-b6df-4e86-90bc-52a7d0c49cbf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.985392 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:56 crc kubenswrapper[4822]: I1201 07:14:56.988984 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.038970 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7f721b33-b6df-4e86-90bc-52a7d0c49cbf" (UID: "7f721b33-b6df-4e86-90bc-52a7d0c49cbf"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.042863 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69ebc96d-4310-46ec-a237-9f299e08dc64" path="/var/lib/kubelet/pods/69ebc96d-4310-46ec-a237-9f299e08dc64/volumes" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.043937 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92fc89df-ebe6-45e8-b0f3-4a4d20476ee4" path="/var/lib/kubelet/pods/92fc89df-ebe6-45e8-b0f3-4a4d20476ee4/volumes" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.044479 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b30c0ac5-fe50-4055-b46c-49928b99d337" path="/var/lib/kubelet/pods/b30c0ac5-fe50-4055-b46c-49928b99d337/volumes" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.044665 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7f721b33-b6df-4e86-90bc-52a7d0c49cbf" (UID: "7f721b33-b6df-4e86-90bc-52a7d0c49cbf"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.067127 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi33d4-account-delete-5vtpn"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.091069 4822 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.091093 4822 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f721b33-b6df-4e86-90bc-52a7d0c49cbf-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.178247 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.164:8776/healthcheck\": read tcp 10.217.0.2:49126->10.217.0.164:8776: read: connection reset by peer" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.345194 4822 scope.go:117] "RemoveContainer" containerID="fd4e18f76e0e6f5823923b10ab11728a84798dceeb11a8364e726a5f18115231" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.387743 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.396646 4822 scope.go:117] "RemoveContainer" containerID="d492a8fc847ccb6927b247e9b2bcc137ee2203221c147de374c232b92714bb3a" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.446668 4822 scope.go:117] "RemoveContainer" containerID="619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.519512 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.519732 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-config-data-default\") pod \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.519827 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-combined-ca-bundle\") pod \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.519865 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-galera-tls-certs\") pod \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.519886 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.520010 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tmwf\" (UniqueName: \"kubernetes.io/projected/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-kube-api-access-7tmwf\") pod \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.520071 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-config-data-generated\") pod \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.520096 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-kolla-config\") pod \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.520171 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-operator-scripts\") pod \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\" (UID: \"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.521051 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" (UID: "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.521078 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" (UID: "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.521285 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" (UID: "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.521829 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" (UID: "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.526844 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-kube-api-access-7tmwf" (OuterVolumeSpecName: "kube-api-access-7tmwf") pod "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" (UID: "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a"). InnerVolumeSpecName "kube-api-access-7tmwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.577831 4822 scope.go:117] "RemoveContainer" containerID="adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.590837 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.607246 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "mysql-db") pod "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" (UID: "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.619090 4822 scope.go:117] "RemoveContainer" containerID="619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd" Dec 01 07:14:57 crc kubenswrapper[4822]: E1201 07:14:57.619621 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd\": container with ID starting with 619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd not found: ID does not exist" containerID="619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.619652 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd"} err="failed to get container status \"619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd\": rpc error: code = NotFound desc = could not find container \"619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd\": container with ID starting with 619b1a8f29b69c6986b2f24583336f155bf7fc1ae49baa0f0f3036ed9dc64ddd not found: ID does not exist" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.619674 4822 scope.go:117] "RemoveContainer" containerID="adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060" Dec 01 07:14:57 crc kubenswrapper[4822]: E1201 07:14:57.620004 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060\": container with ID starting with adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060 not found: ID does not exist" containerID="adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.620026 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060"} err="failed to get container status \"adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060\": rpc error: code = NotFound desc = could not find container \"adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060\": container with ID starting with adb10bdadc7e10daca9958ba672d3110ff0e678fd50302d65dcc9fa525b18060 not found: ID does not exist" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.620044 4822 scope.go:117] "RemoveContainer" containerID="58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.621322 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-combined-ca-bundle\") pod \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.621356 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlnpl\" (UniqueName: \"kubernetes.io/projected/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-kube-api-access-tlnpl\") pod \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.621392 4822 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-config-data\") pod \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.621432 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-config-data-custom\") pod \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.621452 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-logs\") pod \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\" (UID: \"e2b0531f-a401-46e6-80f0-7f0023e9a0d6\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.621926 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.621946 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tmwf\" (UniqueName: \"kubernetes.io/projected/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-kube-api-access-7tmwf\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.621958 4822 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.621967 4822 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.621976 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.621985 4822 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.623710 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" (UID: "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.626636 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-logs" (OuterVolumeSpecName: "logs") pod "e2b0531f-a401-46e6-80f0-7f0023e9a0d6" (UID: "e2b0531f-a401-46e6-80f0-7f0023e9a0d6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.652105 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-kube-api-access-tlnpl" (OuterVolumeSpecName: "kube-api-access-tlnpl") pod "e2b0531f-a401-46e6-80f0-7f0023e9a0d6" (UID: "e2b0531f-a401-46e6-80f0-7f0023e9a0d6"). InnerVolumeSpecName "kube-api-access-tlnpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.653612 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e2b0531f-a401-46e6-80f0-7f0023e9a0d6" (UID: "e2b0531f-a401-46e6-80f0-7f0023e9a0d6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.662256 4822 scope.go:117] "RemoveContainer" containerID="34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.668522 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d8fc4ccc9-zlscm"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.674724 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d8fc4ccc9-zlscm"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.689697 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-8t992"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.704951 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-8t992"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.724358 4822 scope.go:117] "RemoveContainer" containerID="58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.724832 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1df0a4e-4359-436c-9937-e4af9b500ae5-logs\") pod \"a1df0a4e-4359-436c-9937-e4af9b500ae5\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.724949 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-config-data-custom\") pod \"a1df0a4e-4359-436c-9937-e4af9b500ae5\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.725069 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-config-data\") pod \"a1df0a4e-4359-436c-9937-e4af9b500ae5\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.725126 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-combined-ca-bundle\") pod \"a1df0a4e-4359-436c-9937-e4af9b500ae5\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.725158 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5x29v\" 
(UniqueName: \"kubernetes.io/projected/a1df0a4e-4359-436c-9937-e4af9b500ae5-kube-api-access-5x29v\") pod \"a1df0a4e-4359-436c-9937-e4af9b500ae5\" (UID: \"a1df0a4e-4359-436c-9937-e4af9b500ae5\") " Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.725524 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.725541 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlnpl\" (UniqueName: \"kubernetes.io/projected/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-kube-api-access-tlnpl\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.725572 4822 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.725580 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.731599 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1df0a4e-4359-436c-9937-e4af9b500ae5-logs" (OuterVolumeSpecName: "logs") pod "a1df0a4e-4359-436c-9937-e4af9b500ae5" (UID: "a1df0a4e-4359-436c-9937-e4af9b500ae5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: E1201 07:14:57.734243 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60\": container with ID starting with 58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60 not found: ID does not exist" containerID="58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.734289 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60"} err="failed to get container status \"58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60\": rpc error: code = NotFound desc = could not find container \"58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60\": container with ID starting with 58f44f40ff419092afb90c0b905b71e0bf707adadc072f160fd4910e0ef03d60 not found: ID does not exist" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.734456 4822 scope.go:117] "RemoveContainer" containerID="34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0" Dec 01 07:14:57 crc kubenswrapper[4822]: E1201 07:14:57.739269 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0\": container with ID starting with 34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0 not found: ID does not exist" containerID="34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.739312 4822 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0"} err="failed to get container status \"34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0\": rpc error: code = NotFound desc = could not find container \"34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0\": container with ID starting with 34e8852b805ed24c0b2ec819ae87a1a0d7964731a8070dc91d28e4f4c7276ec0 not found: ID does not exist" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.739348 4822 scope.go:117] "RemoveContainer" containerID="399ace0799f2c11abaf11cd88fb9941adf1ed68dea1ffe69cd7f58d95ad171e4" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.761808 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a1df0a4e-4359-436c-9937-e4af9b500ae5" (UID: "a1df0a4e-4359-436c-9937-e4af9b500ae5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.767662 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.770430 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1df0a4e-4359-436c-9937-e4af9b500ae5-kube-api-access-5x29v" (OuterVolumeSpecName: "kube-api-access-5x29v") pod "a1df0a4e-4359-436c-9937-e4af9b500ae5" (UID: "a1df0a4e-4359-436c-9937-e4af9b500ae5"). InnerVolumeSpecName "kube-api-access-5x29v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.776812 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.783886 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.795448 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.802248 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.809615 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.815441 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" event={"ID":"a1df0a4e-4359-436c-9937-e4af9b500ae5","Type":"ContainerDied","Data":"18f3ecfd0c70308cc1b03a641ada8603d9bbe1e329d3b6fa31b513a75df8afd4"} Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.815610 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.819581 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi33d4-account-delete-5vtpn" event={"ID":"b1312168-fba0-46d6-8ca3-346303262924","Type":"ContainerStarted","Data":"a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a"} Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.819618 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi33d4-account-delete-5vtpn" event={"ID":"b1312168-fba0-46d6-8ca3-346303262924","Type":"ContainerStarted","Data":"e6c7110e8325f7235ac87d26637cb57cdead35db732b81cae4f2dc8de8fe8b19"} Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.824140 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-685677fbf5-d4ncs"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.833968 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-685677fbf5-d4ncs"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.834514 4822 generic.go:334] "Generic (PLEG): container finished" podID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" containerID="da2877c86b5adb431c13ff28bb62c50e650b56f0e6e01accc5c938c8b253a36c" exitCode=0 Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.834581 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3","Type":"ContainerDied","Data":"da2877c86b5adb431c13ff28bb62c50e650b56f0e6e01accc5c938c8b253a36c"} Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.834603 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3","Type":"ContainerDied","Data":"89d9570caf317d01127d51a05da6049d9afacaf1d133effd375fd9042ecaf7b8"} Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.834614 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89d9570caf317d01127d51a05da6049d9afacaf1d133effd375fd9042ecaf7b8" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.838727 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.839769 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5x29v\" (UniqueName: \"kubernetes.io/projected/a1df0a4e-4359-436c-9937-e4af9b500ae5-kube-api-access-5x29v\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.839790 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1df0a4e-4359-436c-9937-e4af9b500ae5-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.839800 4822 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.847671 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.847854 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novaapi33d4-account-delete-5vtpn" podStartSLOduration=4.847841907 podStartE2EDuration="4.847841907s" podCreationTimestamp="2025-12-01 07:14:53 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:14:57.838544549 +0000 UTC m=+1453.159367366" watchObservedRunningTime="2025-12-01 07:14:57.847841907 +0000 UTC m=+1453.168649593" Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.853113 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell04972-account-delete-qmqf7" event={"ID":"a3f05675-879d-4586-af90-8aa6b11ad8a3","Type":"ContainerStarted","Data":"0b161634f276a0b4d892f2bb8e27c8e6f2449fab5e657e2c3ce5ddabe9fb59b3"} Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.858796 4822 generic.go:334] "Generic (PLEG): container finished" podID="33a2ee0e-4d60-46f9-9f2a-a094af634a64" containerID="6b2321968780485e0490e50ec7771365e998c149930db8256f329ca62769e973" exitCode=0 Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.858935 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderac9b-account-delete-lmq7r" event={"ID":"33a2ee0e-4d60-46f9-9f2a-a094af634a64","Type":"ContainerDied","Data":"6b2321968780485e0490e50ec7771365e998c149930db8256f329ca62769e973"} Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.867501 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.867767 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="ceilometer-central-agent" containerID="cri-o://232bfa3723a8e9ea8d1812e63175355599297eaec99462bd5a8dbce62dfd5a9f" gracePeriod=30 Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.867885 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="proxy-httpd" containerID="cri-o://ee62047f058f3d3b037c7b5c164a2486cf6fd7832ad234039229326d5da5cb6e" gracePeriod=30 Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.867923 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="sg-core" containerID="cri-o://6f031e1b4b2e282b4cd48d5d64210b11a8231bd55a4051a1a21da63d57205bab" gracePeriod=30 Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.867951 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="ceilometer-notification-agent" containerID="cri-o://95d7cb1b03cca33094490f26ae44fa9b54c6204f312055eaf24990307abf0a3d" gracePeriod=30 Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.873144 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron40f1-account-delete-rb6h6" event={"ID":"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3","Type":"ContainerStarted","Data":"f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d"} Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.882320 4822 generic.go:334] "Generic (PLEG): container finished" podID="11e4a2b6-5901-43b2-ab37-ab16b0ac03b0" containerID="51b8d67ad80c132d1af12a855561278f56330bd6e03769d6b5fdc4c66129804f" exitCode=0 Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.882496 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance8845-account-delete-rb8lw" 
event={"ID":"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0","Type":"ContainerDied","Data":"51b8d67ad80c132d1af12a855561278f56330bd6e03769d6b5fdc4c66129804f"} Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.948147 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.948385 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="8e079db6-a9e0-464b-b99d-57887190a5b8" containerName="kube-state-metrics" containerID="cri-o://52b80d1f9eb289b2321d023ee74702200d73e7e0bcf824e5f0e71860cdbd014f" gracePeriod=30 Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.967958 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-56d5c94499-xwfh7" event={"ID":"e2b0531f-a401-46e6-80f0-7f0023e9a0d6","Type":"ContainerDied","Data":"9ec8e591da8b03e1ee0179291bdb1a7cdb3b03f0d732e26febaced297545314f"} Dec 01 07:14:57 crc kubenswrapper[4822]: I1201 07:14:57.968165 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-56d5c94499-xwfh7" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.006043 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.006703 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"50ff5a68-493b-4e62-9e88-a8f1d0b4d78a","Type":"ContainerDied","Data":"db0ec99f3583f720a0f43a152aaed9bf3fa0b08d2ff592581a36a0a984a1688f"} Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.009692 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron40f1-account-delete-rb6h6" podStartSLOduration=5.009669998 podStartE2EDuration="5.009669998s" podCreationTimestamp="2025-12-01 07:14:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:14:57.94002252 +0000 UTC m=+1453.260830206" watchObservedRunningTime="2025-12-01 07:14:58.009669998 +0000 UTC m=+1453.330477684" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.035907 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6e4d-account-delete-bz5tp" event={"ID":"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf","Type":"ContainerStarted","Data":"f05b963fa90e51f441cfec47cbaa1a3c1e77f0b689982aacfe5510b1a4ea7520"} Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.052442 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.055087 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="baab33a8-22b8-4097-8c91-73d5f005fdf7" containerName="memcached" containerID="cri-o://9d2642fba08e82f7fab50725c4908dd7f41042b114c66469823905f1b4131ec5" gracePeriod=30 Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.093980 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-t6rkn"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.109254 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.119103 4822 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/barbican6e4d-account-delete-bz5tp" podStartSLOduration=6.119082728 podStartE2EDuration="6.119082728s" podCreationTimestamp="2025-12-01 07:14:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:14:58.070672438 +0000 UTC m=+1453.391480124" watchObservedRunningTime="2025-12-01 07:14:58.119082728 +0000 UTC m=+1453.439890414" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.158075 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:58 crc kubenswrapper[4822]: E1201 07:14:58.158502 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:14:58 crc kubenswrapper[4822]: E1201 07:14:58.158564 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts podName:2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf nodeName:}" failed. No retries permitted until 2025-12-01 07:14:58.658534941 +0000 UTC m=+1453.979342627 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts") pod "barbican6e4d-account-delete-bz5tp" (UID: "2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf") : configmap "openstack-scripts" not found Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.197158 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-t6rkn"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.203974 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1df0a4e-4359-436c-9937-e4af9b500ae5" (UID: "a1df0a4e-4359-436c-9937-e4af9b500ae5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.228527 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-n4pm7"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.296451 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.297079 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-n4pm7"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.302244 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="a1229c08-35a5-4f16-8334-f32bb9b852b6" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.310290 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-5875588964-pg9h2"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.310570 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-5875588964-pg9h2" podUID="b422e423-4e50-4e96-a341-d7bb5188c4af" containerName="keystone-api" containerID="cri-o://65c8f8c2ad754b11428eef89af4d4e06df73f318c729e1210811528d1ba1f58d" gracePeriod=30 Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.330256 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.337647 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-fv5zp"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.344941 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-fv5zp"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.376479 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-75d6-account-create-update-qqkzs"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.387598 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-75d6-account-create-update-qqkzs"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.392051 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="da2985c5-716e-43ad-b892-ea29d88fa639" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.397755 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement75d6-account-delete-46gbh"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.408257 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-2t96f"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.408313 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-2t96f"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.413280 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-ef00-account-create-update-h4cql"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.419758 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-ef00-account-create-update-h4cql"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.426655 4822 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-cxrh8"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.434039 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-cxrh8"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.450446 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinderac9b-account-delete-lmq7r"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.468214 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" podUID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.163:9311/healthcheck\": read tcp 10.217.0.2:36806->10.217.0.163:9311: read: connection reset by peer" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.468582 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" podUID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.163:9311/healthcheck\": read tcp 10.217.0.2:36790->10.217.0.163:9311: read: connection reset by peer" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.470056 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": read tcp 10.217.0.2:53296->10.217.0.200:8775: read: connection reset by peer" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.470159 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": read tcp 10.217.0.2:53282->10.217.0.200:8775: read: connection reset by peer" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.479268 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-ac9b-account-create-update-khj8h"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.501256 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-ac9b-account-create-update-khj8h"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.516935 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-tdc4c"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.528193 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-tdc4c"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.541574 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron40f1-account-delete-rb6h6"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.548335 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-40f1-account-create-update-frwxk"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.559578 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-40f1-account-create-update-frwxk"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.567605 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-rsm9w"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.576646 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-rsm9w"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.581593 4822 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/novacell04972-account-delete-qmqf7"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.590587 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-4972-account-create-update-962bd"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.599977 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-4972-account-create-update-962bd"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.643712 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2b0531f-a401-46e6-80f0-7f0023e9a0d6" (UID: "e2b0531f-a401-46e6-80f0-7f0023e9a0d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.646689 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" (UID: "50ff5a68-493b-4e62-9e88-a8f1d0b4d78a"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:58 crc kubenswrapper[4822]: E1201 07:14:58.683478 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 01 07:14:58 crc kubenswrapper[4822]: E1201 07:14:58.690019 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 01 07:14:58 crc kubenswrapper[4822]: E1201 07:14:58.692368 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 01 07:14:58 crc kubenswrapper[4822]: E1201 07:14:58.692401 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="fd2326e5-f8a2-47ca-8519-576caa1825c5" containerName="ovn-northd" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.722869 4822 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.722903 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:58 crc kubenswrapper[4822]: E1201 07:14:58.722985 4822 configmap.go:193] Couldn't get configMap 
openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:14:58 crc kubenswrapper[4822]: E1201 07:14:58.723038 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts podName:2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf nodeName:}" failed. No retries permitted until 2025-12-01 07:14:59.723020553 +0000 UTC m=+1455.043828249 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts") pod "barbican6e4d-account-delete-bz5tp" (UID: "2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf") : configmap "openstack-scripts" not found Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.790860 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-g5k9q"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.796360 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-config-data" (OuterVolumeSpecName: "config-data") pod "a1df0a4e-4359-436c-9937-e4af9b500ae5" (UID: "a1df0a4e-4359-436c-9937-e4af9b500ae5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.804211 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-g5k9q"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.809357 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-config-data" (OuterVolumeSpecName: "config-data") pod "e2b0531f-a401-46e6-80f0-7f0023e9a0d6" (UID: "e2b0531f-a401-46e6-80f0-7f0023e9a0d6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.834701 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1df0a4e-4359-436c-9937-e4af9b500ae5-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.834739 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2b0531f-a401-46e6-80f0-7f0023e9a0d6-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.869607 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-33d4-account-create-update-ng7q9"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.871201 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi33d4-account-delete-5vtpn"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.914439 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-33d4-account-create-update-ng7q9"] Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.942331 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="4508389c-d1b8-4646-902e-4fbb597de2b7" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.177:9292/healthcheck\": dial tcp 10.217.0.177:9292: connect: connection refused" Dec 01 07:14:58 crc kubenswrapper[4822]: I1201 07:14:58.942794 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="4508389c-d1b8-4646-902e-4fbb597de2b7" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.177:9292/healthcheck\": dial tcp 10.217.0.177:9292: connect: connection refused" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.005785 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03cbadb0-cfc2-4ade-9e82-efd876532994" path="/var/lib/kubelet/pods/03cbadb0-cfc2-4ade-9e82-efd876532994/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.006499 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13737e91-3ce4-4bcc-a605-d1591596c446" path="/var/lib/kubelet/pods/13737e91-3ce4-4bcc-a605-d1591596c446/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.007162 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="141fe1bc-0085-48cc-a283-f8596ff7240a" path="/var/lib/kubelet/pods/141fe1bc-0085-48cc-a283-f8596ff7240a/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.008184 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14899afe-5dfb-4985-bff9-f5d1611efb1d" path="/var/lib/kubelet/pods/14899afe-5dfb-4985-bff9-f5d1611efb1d/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.008757 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c1e0d17-7093-464d-ae8f-6b483a28558b" path="/var/lib/kubelet/pods/1c1e0d17-7093-464d-ae8f-6b483a28558b/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.009309 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="246a1687-b876-4135-9a78-5aea28bd9663" path="/var/lib/kubelet/pods/246a1687-b876-4135-9a78-5aea28bd9663/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.010327 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3aa3420d-c277-4e32-be0b-4e7c4b7a7f76" 
path="/var/lib/kubelet/pods/3aa3420d-c277-4e32-be0b-4e7c4b7a7f76/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.011780 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f69aa7d-6165-42fb-a44e-1c0a25207b7c" path="/var/lib/kubelet/pods/3f69aa7d-6165-42fb-a44e-1c0a25207b7c/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.012687 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="735cd01e-931e-44b0-968d-ebe114278896" path="/var/lib/kubelet/pods/735cd01e-931e-44b0-968d-ebe114278896/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.014214 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f721b33-b6df-4e86-90bc-52a7d0c49cbf" path="/var/lib/kubelet/pods/7f721b33-b6df-4e86-90bc-52a7d0c49cbf/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.014925 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77" path="/var/lib/kubelet/pods/8ddaa4a4-d5dc-4046-89b8-6a335bfd6e77/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.015446 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f969273-1c0e-4ac7-aae1-e04cea6c864f" path="/var/lib/kubelet/pods/8f969273-1c0e-4ac7-aae1-e04cea6c864f/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.017432 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="946e1406-8dc2-445d-9d5a-f801260d554b" path="/var/lib/kubelet/pods/946e1406-8dc2-445d-9d5a-f801260d554b/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.020108 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95469730-4c0e-45f8-b654-26449f12da7d" path="/var/lib/kubelet/pods/95469730-4c0e-45f8-b654-26449f12da7d/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.020893 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a630d16e-8baf-4fbd-851d-ae5c49baf062" path="/var/lib/kubelet/pods/a630d16e-8baf-4fbd-851d-ae5c49baf062/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.022043 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7d41549-25ee-4695-a0fb-0db75a1d2238" path="/var/lib/kubelet/pods/b7d41549-25ee-4695-a0fb-0db75a1d2238/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.022801 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be2b75bc-27b6-4e33-8f17-0c30b4512014" path="/var/lib/kubelet/pods/be2b75bc-27b6-4e33-8f17-0c30b4512014/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.023514 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd4e4bde-d294-4379-b394-44d4b43371bc" path="/var/lib/kubelet/pods/cd4e4bde-d294-4379-b394-44d4b43371bc/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.027747 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7060e1b-a0a1-4403-a0f3-6fb1ffff308a" path="/var/lib/kubelet/pods/e7060e1b-a0a1-4403-a0f3-6fb1ffff308a/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.029198 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f14733ce-3da3-40ad-97d6-b9ce628590ec" path="/var/lib/kubelet/pods/f14733ce-3da3-40ad-97d6-b9ce628590ec/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.029737 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa49318e-e2cd-40c4-910c-3e91feae5e73" 
path="/var/lib/kubelet/pods/fa49318e-e2cd-40c4-910c-3e91feae5e73/volumes" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.067948 4822 generic.go:334] "Generic (PLEG): container finished" podID="8e079db6-a9e0-464b-b99d-57887190a5b8" containerID="52b80d1f9eb289b2321d023ee74702200d73e7e0bcf824e5f0e71860cdbd014f" exitCode=2 Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.068025 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8e079db6-a9e0-464b-b99d-57887190a5b8","Type":"ContainerDied","Data":"52b80d1f9eb289b2321d023ee74702200d73e7e0bcf824e5f0e71860cdbd014f"} Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.081401 4822 generic.go:334] "Generic (PLEG): container finished" podID="b725dbb9-8785-43a1-9f35-215938938f6e" containerID="53ee8c277f9abfc79fafeffabdd86dd7daf8f3652232dfbf0acccc1cc46201b6" exitCode=0 Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.081465 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b725dbb9-8785-43a1-9f35-215938938f6e","Type":"ContainerDied","Data":"53ee8c277f9abfc79fafeffabdd86dd7daf8f3652232dfbf0acccc1cc46201b6"} Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.085635 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement75d6-account-delete-46gbh" event={"ID":"a794a063-7f9d-4a0f-9cf7-ae70b70769eb","Type":"ContainerStarted","Data":"2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20"} Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.086103 4822 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/placement75d6-account-delete-46gbh" secret="" err="secret \"galera-openstack-dockercfg-8tfbd\" not found" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.090930 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="ad18272e-45a9-40cd-8b46-2de8cb3a31be" containerName="galera" containerID="cri-o://bc0412ee3ba6291d6a77685a5f298599e54eb460cf9c8ccea6109363b76fe453" gracePeriod=30 Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.092018 4822 generic.go:334] "Generic (PLEG): container finished" podID="4508389c-d1b8-4646-902e-4fbb597de2b7" containerID="b3c842a2cb463bfede65789670bfcdeca18f9089a409fd8da36121671752306c" exitCode=0 Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.092059 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4508389c-d1b8-4646-902e-4fbb597de2b7","Type":"ContainerDied","Data":"b3c842a2cb463bfede65789670bfcdeca18f9089a409fd8da36121671752306c"} Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.093900 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell04972-account-delete-qmqf7" event={"ID":"a3f05675-879d-4586-af90-8aa6b11ad8a3","Type":"ContainerStarted","Data":"ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2"} Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.093988 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novacell04972-account-delete-qmqf7" podUID="a3f05675-879d-4586-af90-8aa6b11ad8a3" containerName="mariadb-account-delete" containerID="cri-o://ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2" gracePeriod=30 Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.115134 4822 generic.go:334] "Generic (PLEG): container finished" 
podID="3076b4c6-b401-48a5-8343-a34d9c979ea3" containerID="2737b375be318a62945c13daa6a442680918aa2addf2fb31b4a1be4501d00b4c" exitCode=0 Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.115194 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3076b4c6-b401-48a5-8343-a34d9c979ea3","Type":"ContainerDied","Data":"2737b375be318a62945c13daa6a442680918aa2addf2fb31b4a1be4501d00b4c"} Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.127466 4822 generic.go:334] "Generic (PLEG): container finished" podID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" containerID="3e945f46d94da2037046bdb5e78c0bab8aeb085af05bb39debf197f614a96054" exitCode=0 Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.127573 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" event={"ID":"4f5cf9f6-a48b-455b-aef3-952697eb1a09","Type":"ContainerDied","Data":"3e945f46d94da2037046bdb5e78c0bab8aeb085af05bb39debf197f614a96054"} Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.154081 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement75d6-account-delete-46gbh" podStartSLOduration=6.15405671 podStartE2EDuration="6.15405671s" podCreationTimestamp="2025-12-01 07:14:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:14:59.105442054 +0000 UTC m=+1454.426249740" watchObservedRunningTime="2025-12-01 07:14:59.15405671 +0000 UTC m=+1454.474864396" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.190920 4822 generic.go:334] "Generic (PLEG): container finished" podID="74508f35-c5cd-4e07-8883-831d2de65f35" containerID="70b0818800c2616bc33b123c3a4baa1c0af71f6cf5e88867c893507114ae9b3c" exitCode=0 Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.191005 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"74508f35-c5cd-4e07-8883-831d2de65f35","Type":"ContainerDied","Data":"70b0818800c2616bc33b123c3a4baa1c0af71f6cf5e88867c893507114ae9b3c"} Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.236819 4822 generic.go:334] "Generic (PLEG): container finished" podID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerID="6f031e1b4b2e282b4cd48d5d64210b11a8231bd55a4051a1a21da63d57205bab" exitCode=2 Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.236850 4822 generic.go:334] "Generic (PLEG): container finished" podID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerID="232bfa3723a8e9ea8d1812e63175355599297eaec99462bd5a8dbce62dfd5a9f" exitCode=0 Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.236919 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f82bf765-7694-4ad6-8680-258c9e96cde0","Type":"ContainerDied","Data":"6f031e1b4b2e282b4cd48d5d64210b11a8231bd55a4051a1a21da63d57205bab"} Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.236951 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f82bf765-7694-4ad6-8680-258c9e96cde0","Type":"ContainerDied","Data":"232bfa3723a8e9ea8d1812e63175355599297eaec99462bd5a8dbce62dfd5a9f"} Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.255467 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.255515 4822 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts podName:a794a063-7f9d-4a0f-9cf7-ae70b70769eb nodeName:}" failed. No retries permitted until 2025-12-01 07:14:59.755500949 +0000 UTC m=+1455.076308635 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts") pod "placement75d6-account-delete-46gbh" (UID: "a794a063-7f9d-4a0f-9cf7-ae70b70769eb") : configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.303844 4822 generic.go:334] "Generic (PLEG): container finished" podID="020e76da-9968-4212-a34c-c01c8f8979de" containerID="42a4de3b243b1977444983268e3455770ea130dc481d53f206e1f70bf6eac99c" exitCode=0 Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.303930 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55d788fdd6-vxlcs" event={"ID":"020e76da-9968-4212-a34c-c01c8f8979de","Type":"ContainerDied","Data":"42a4de3b243b1977444983268e3455770ea130dc481d53f206e1f70bf6eac99c"} Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.303954 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55d788fdd6-vxlcs" event={"ID":"020e76da-9968-4212-a34c-c01c8f8979de","Type":"ContainerDied","Data":"b09b42c38ff49cec176bd20610570cb47b55062ed4275969a301088136795b7b"} Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.303967 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b09b42c38ff49cec176bd20610570cb47b55062ed4275969a301088136795b7b" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.334772 4822 generic.go:334] "Generic (PLEG): container finished" podID="baab33a8-22b8-4097-8c91-73d5f005fdf7" containerID="9d2642fba08e82f7fab50725c4908dd7f41042b114c66469823905f1b4131ec5" exitCode=0 Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.335034 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"baab33a8-22b8-4097-8c91-73d5f005fdf7","Type":"ContainerDied","Data":"9d2642fba08e82f7fab50725c4908dd7f41042b114c66469823905f1b4131ec5"} Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.335432 4822 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novaapi33d4-account-delete-5vtpn" secret="" err="secret \"galera-openstack-dockercfg-8tfbd\" not found" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.336011 4822 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/neutron40f1-account-delete-rb6h6" secret="" err="secret \"galera-openstack-dockercfg-8tfbd\" not found" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.337892 4822 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/barbican6e4d-account-delete-bz5tp" secret="" err="secret \"galera-openstack-dockercfg-8tfbd\" not found" Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.462766 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.463499 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.463576 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts podName:b1312168-fba0-46d6-8ca3-346303262924 nodeName:}" failed. No retries permitted until 2025-12-01 07:14:59.963541271 +0000 UTC m=+1455.284348957 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts") pod "novaapi33d4-account-delete-5vtpn" (UID: "b1312168-fba0-46d6-8ca3-346303262924") : configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.463601 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts podName:b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3 nodeName:}" failed. No retries permitted until 2025-12-01 07:14:59.963593722 +0000 UTC m=+1455.284401488 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts") pod "neutron40f1-account-delete-rb6h6" (UID: "b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3") : configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.659423 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066 is running failed: container process not found" containerID="8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.659837 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066 is running failed: container process not found" containerID="8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.660014 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066 is running failed: container process not found" containerID="8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.660146 4822 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066 is running failed: container process not found" probeType="Readiness" 
pod="openstack/nova-cell1-conductor-0" podUID="46169b3f-3e1f-4601-a82e-f3ea1bdde003" containerName="nova-cell1-conductor-conductor" Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.768063 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.768415 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts podName:a794a063-7f9d-4a0f-9cf7-ae70b70769eb nodeName:}" failed. No retries permitted until 2025-12-01 07:15:00.768384923 +0000 UTC m=+1456.089192609 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts") pod "placement75d6-account-delete-46gbh" (UID: "a794a063-7f9d-4a0f-9cf7-ae70b70769eb") : configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.768697 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.768728 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts podName:2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf nodeName:}" failed. No retries permitted until 2025-12-01 07:15:01.768718222 +0000 UTC m=+1457.089525908 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts") pod "barbican6e4d-account-delete-bz5tp" (UID: "2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf") : configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.829471 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bc0412ee3ba6291d6a77685a5f298599e54eb460cf9c8ccea6109363b76fe453" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.837471 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bc0412ee3ba6291d6a77685a5f298599e54eb460cf9c8ccea6109363b76fe453" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.845752 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bc0412ee3ba6291d6a77685a5f298599e54eb460cf9c8ccea6109363b76fe453" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.845830 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="ad18272e-45a9-40cd-8b46-2de8cb3a31be" containerName="galera" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.944920 4822 scope.go:117] "RemoveContainer" 
containerID="47987ba4418d28d6c596deb0f84178af4b52de7645b18ebeecc2505db58b4d61" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.950397 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.975377 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.981059 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell04972-account-delete-qmqf7" podStartSLOduration=6.981030131 podStartE2EDuration="6.981030131s" podCreationTimestamp="2025-12-01 07:14:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:14:59.127052492 +0000 UTC m=+1454.447860178" watchObservedRunningTime="2025-12-01 07:14:59.981030131 +0000 UTC m=+1455.301837837" Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.993414 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.993481 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts podName:b1312168-fba0-46d6-8ca3-346303262924 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:00.993467626 +0000 UTC m=+1456.314275312 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts") pod "novaapi33d4-account-delete-5vtpn" (UID: "b1312168-fba0-46d6-8ca3-346303262924") : configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.993518 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: E1201 07:14:59.993560 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts podName:b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:00.993538748 +0000 UTC m=+1456.314346434 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts") pod "neutron40f1-account-delete-rb6h6" (UID: "b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3") : configmap "openstack-scripts" not found Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.997000 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 07:14:59 crc kubenswrapper[4822]: I1201 07:14:59.997402 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.004711 4822 scope.go:117] "RemoveContainer" containerID="e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.012712 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.085186 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-56d5c94499-xwfh7"] Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.093318 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-56d5c94499-xwfh7"] Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.094033 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-config-data\") pod \"020e76da-9968-4212-a34c-c01c8f8979de\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.094170 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zg52x\" (UniqueName: \"kubernetes.io/projected/3076b4c6-b401-48a5-8343-a34d9c979ea3-kube-api-access-zg52x\") pod \"3076b4c6-b401-48a5-8343-a34d9c979ea3\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.094247 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/020e76da-9968-4212-a34c-c01c8f8979de-logs\") pod \"020e76da-9968-4212-a34c-c01c8f8979de\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.094377 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-config-data-custom\") pod \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.094458 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-internal-tls-certs\") pod \"3076b4c6-b401-48a5-8343-a34d9c979ea3\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.094563 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvrsm\" (UniqueName: \"kubernetes.io/projected/020e76da-9968-4212-a34c-c01c8f8979de-kube-api-access-kvrsm\") pod \"020e76da-9968-4212-a34c-c01c8f8979de\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.094721 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-config-data\") pod \"3076b4c6-b401-48a5-8343-a34d9c979ea3\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.094798 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-public-tls-certs\") pod \"020e76da-9968-4212-a34c-c01c8f8979de\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.094860 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-etc-machine-id\") pod \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\" (UID: 
\"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.094955 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-combined-ca-bundle\") pod \"8e079db6-a9e0-464b-b99d-57887190a5b8\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095032 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-combined-ca-bundle\") pod \"3076b4c6-b401-48a5-8343-a34d9c979ea3\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095086 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/020e76da-9968-4212-a34c-c01c8f8979de-logs" (OuterVolumeSpecName: "logs") pod "020e76da-9968-4212-a34c-c01c8f8979de" (UID: "020e76da-9968-4212-a34c-c01c8f8979de"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095099 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-public-tls-certs\") pod \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095202 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-internal-tls-certs\") pod \"020e76da-9968-4212-a34c-c01c8f8979de\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095251 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-public-tls-certs\") pod \"3076b4c6-b401-48a5-8343-a34d9c979ea3\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095282 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3076b4c6-b401-48a5-8343-a34d9c979ea3-logs\") pod \"3076b4c6-b401-48a5-8343-a34d9c979ea3\" (UID: \"3076b4c6-b401-48a5-8343-a34d9c979ea3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095307 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-combined-ca-bundle\") pod \"020e76da-9968-4212-a34c-c01c8f8979de\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095352 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-state-metrics-tls-config\") pod \"8e079db6-a9e0-464b-b99d-57887190a5b8\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095393 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-scripts\") pod \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095431 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-internal-tls-certs\") pod \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095469 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhhlr\" (UniqueName: \"kubernetes.io/projected/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-kube-api-access-rhhlr\") pod \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095500 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-scripts\") pod \"020e76da-9968-4212-a34c-c01c8f8979de\" (UID: \"020e76da-9968-4212-a34c-c01c8f8979de\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095532 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-logs\") pod \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095584 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgk5v\" (UniqueName: \"kubernetes.io/projected/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-api-access-pgk5v\") pod \"8e079db6-a9e0-464b-b99d-57887190a5b8\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095611 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-config-data\") pod \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095636 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-state-metrics-tls-certs\") pod \"8e079db6-a9e0-464b-b99d-57887190a5b8\" (UID: \"8e079db6-a9e0-464b-b99d-57887190a5b8\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.095682 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-combined-ca-bundle\") pod \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\" (UID: \"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.096532 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/020e76da-9968-4212-a34c-c01c8f8979de-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.098313 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-logs" (OuterVolumeSpecName: "logs") pod "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" (UID: 
"22f94321-d0ce-48f5-82fa-a0b60b5b1dd3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.102748 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.107839 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.107935 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" (UID: "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.108983 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3076b4c6-b401-48a5-8343-a34d9c979ea3-logs" (OuterVolumeSpecName: "logs") pod "3076b4c6-b401-48a5-8343-a34d9c979ea3" (UID: "3076b4c6-b401-48a5-8343-a34d9c979ea3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.136696 4822 scope.go:117] "RemoveContainer" containerID="e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.136863 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" (UID: "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.141350 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f\": container with ID starting with e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f not found: ID does not exist" containerID="e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.155766 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f"} err="failed to get container status \"e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f\": rpc error: code = NotFound desc = could not find container \"e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f\": container with ID starting with e52e55c216358c15b8494de19362694ef9cbc80fa52d0f850668447941e2175f not found: ID does not exist" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.155816 4822 scope.go:117] "RemoveContainer" containerID="29cb191f67cb3d995ae68c5f80b0794867e6eb356848867309a88a2a42dd2ba1" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.142090 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-scripts" (OuterVolumeSpecName: "scripts") pod "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" (UID: "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.148649 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-scripts" (OuterVolumeSpecName: "scripts") pod "020e76da-9968-4212-a34c-c01c8f8979de" (UID: "020e76da-9968-4212-a34c-c01c8f8979de"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.148055 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.154816 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz"] Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.156758 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa49318e-e2cd-40c4-910c-3e91feae5e73" containerName="openstack-network-exporter" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.156774 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa49318e-e2cd-40c4-910c-3e91feae5e73" containerName="openstack-network-exporter" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.156805 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" containerName="cinder-api-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.156811 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" containerName="cinder-api-log" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.156822 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f721b33-b6df-4e86-90bc-52a7d0c49cbf" containerName="proxy-server" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.156828 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f721b33-b6df-4e86-90bc-52a7d0c49cbf" containerName="proxy-server" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.156836 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f721b33-b6df-4e86-90bc-52a7d0c49cbf" containerName="proxy-httpd" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.156842 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f721b33-b6df-4e86-90bc-52a7d0c49cbf" containerName="proxy-httpd" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.156851 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" containerName="mysql-bootstrap" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.156856 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" containerName="mysql-bootstrap" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.156884 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3076b4c6-b401-48a5-8343-a34d9c979ea3" containerName="nova-api-api" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.156890 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3076b4c6-b401-48a5-8343-a34d9c979ea3" containerName="nova-api-api" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.156905 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" containerName="barbican-api-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.156911 4822 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" containerName="barbican-api-log" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.156919 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e079db6-a9e0-464b-b99d-57887190a5b8" containerName="kube-state-metrics" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.156925 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e079db6-a9e0-464b-b99d-57887190a5b8" containerName="kube-state-metrics" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.156935 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f969273-1c0e-4ac7-aae1-e04cea6c864f" containerName="dnsmasq-dns" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.156962 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f969273-1c0e-4ac7-aae1-e04cea6c864f" containerName="dnsmasq-dns" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.156972 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="946e1406-8dc2-445d-9d5a-f801260d554b" containerName="cinder-scheduler" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.156978 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="946e1406-8dc2-445d-9d5a-f801260d554b" containerName="cinder-scheduler" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.156989 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" containerName="galera" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.156994 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" containerName="galera" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157002 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" containerName="barbican-api" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157008 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" containerName="barbican-api" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157020 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1df0a4e-4359-436c-9937-e4af9b500ae5" containerName="barbican-keystone-listener-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157045 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1df0a4e-4359-436c-9937-e4af9b500ae5" containerName="barbican-keystone-listener-log" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157055 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="946e1406-8dc2-445d-9d5a-f801260d554b" containerName="probe" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157060 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="946e1406-8dc2-445d-9d5a-f801260d554b" containerName="probe" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157074 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13737e91-3ce4-4bcc-a605-d1591596c446" containerName="openstack-network-exporter" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157080 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="13737e91-3ce4-4bcc-a605-d1591596c446" containerName="openstack-network-exporter" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157089 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="020e76da-9968-4212-a34c-c01c8f8979de" containerName="placement-api" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157095 4822 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="020e76da-9968-4212-a34c-c01c8f8979de" containerName="placement-api" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157102 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13737e91-3ce4-4bcc-a605-d1591596c446" containerName="ovsdbserver-nb" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157125 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="13737e91-3ce4-4bcc-a605-d1591596c446" containerName="ovsdbserver-nb" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157138 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2b0531f-a401-46e6-80f0-7f0023e9a0d6" containerName="barbican-worker" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157145 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2b0531f-a401-46e6-80f0-7f0023e9a0d6" containerName="barbican-worker" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157153 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="735cd01e-931e-44b0-968d-ebe114278896" containerName="ovsdbserver-sb" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157159 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="735cd01e-931e-44b0-968d-ebe114278896" containerName="ovsdbserver-sb" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157167 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4508389c-d1b8-4646-902e-4fbb597de2b7" containerName="glance-httpd" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157173 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4508389c-d1b8-4646-902e-4fbb597de2b7" containerName="glance-httpd" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157180 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03cbadb0-cfc2-4ade-9e82-efd876532994" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157186 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="03cbadb0-cfc2-4ade-9e82-efd876532994" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157216 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f969273-1c0e-4ac7-aae1-e04cea6c864f" containerName="init" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157221 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f969273-1c0e-4ac7-aae1-e04cea6c864f" containerName="init" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157230 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2b0531f-a401-46e6-80f0-7f0023e9a0d6" containerName="barbican-worker-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157236 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2b0531f-a401-46e6-80f0-7f0023e9a0d6" containerName="barbican-worker-log" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157246 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3076b4c6-b401-48a5-8343-a34d9c979ea3" containerName="nova-api-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157251 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3076b4c6-b401-48a5-8343-a34d9c979ea3" containerName="nova-api-log" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157260 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" containerName="cinder-api" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157266 4822 
state_mem.go:107] "Deleted CPUSet assignment" podUID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" containerName="cinder-api" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157296 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="735cd01e-931e-44b0-968d-ebe114278896" containerName="openstack-network-exporter" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157303 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="735cd01e-931e-44b0-968d-ebe114278896" containerName="openstack-network-exporter" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157312 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4508389c-d1b8-4646-902e-4fbb597de2b7" containerName="glance-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157317 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4508389c-d1b8-4646-902e-4fbb597de2b7" containerName="glance-log" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157324 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1df0a4e-4359-436c-9937-e4af9b500ae5" containerName="barbican-keystone-listener" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157329 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1df0a4e-4359-436c-9937-e4af9b500ae5" containerName="barbican-keystone-listener" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.157338 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="020e76da-9968-4212-a34c-c01c8f8979de" containerName="placement-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.157344 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="020e76da-9968-4212-a34c-c01c8f8979de" containerName="placement-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158193 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e079db6-a9e0-464b-b99d-57887190a5b8" containerName="kube-state-metrics" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158209 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="735cd01e-931e-44b0-968d-ebe114278896" containerName="openstack-network-exporter" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158241 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2b0531f-a401-46e6-80f0-7f0023e9a0d6" containerName="barbican-worker-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158250 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="946e1406-8dc2-445d-9d5a-f801260d554b" containerName="probe" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158261 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f721b33-b6df-4e86-90bc-52a7d0c49cbf" containerName="proxy-server" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158274 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="020e76da-9968-4212-a34c-c01c8f8979de" containerName="placement-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158281 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1df0a4e-4359-436c-9937-e4af9b500ae5" containerName="barbican-keystone-listener" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158288 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="735cd01e-931e-44b0-968d-ebe114278896" containerName="ovsdbserver-sb" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158299 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" 
containerName="barbican-api-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158329 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1df0a4e-4359-436c-9937-e4af9b500ae5" containerName="barbican-keystone-listener-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158336 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" containerName="barbican-api" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158345 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" containerName="cinder-api" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158352 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="3076b4c6-b401-48a5-8343-a34d9c979ea3" containerName="nova-api-api" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158361 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="13737e91-3ce4-4bcc-a605-d1591596c446" containerName="ovsdbserver-nb" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158371 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4508389c-d1b8-4646-902e-4fbb597de2b7" containerName="glance-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158406 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f969273-1c0e-4ac7-aae1-e04cea6c864f" containerName="dnsmasq-dns" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158415 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="946e1406-8dc2-445d-9d5a-f801260d554b" containerName="cinder-scheduler" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158425 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="3076b4c6-b401-48a5-8343-a34d9c979ea3" containerName="nova-api-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158433 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" containerName="galera" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158439 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="020e76da-9968-4212-a34c-c01c8f8979de" containerName="placement-api" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158446 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2b0531f-a401-46e6-80f0-7f0023e9a0d6" containerName="barbican-worker" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158457 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f721b33-b6df-4e86-90bc-52a7d0c49cbf" containerName="proxy-httpd" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158486 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4508389c-d1b8-4646-902e-4fbb597de2b7" containerName="glance-httpd" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158502 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="13737e91-3ce4-4bcc-a605-d1591596c446" containerName="openstack-network-exporter" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158509 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" containerName="cinder-api-log" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158518 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="03cbadb0-cfc2-4ade-9e82-efd876532994" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.158527 4822 
memory_manager.go:354] "RemoveStaleState removing state" podUID="fa49318e-e2cd-40c4-910c-3e91feae5e73" containerName="openstack-network-exporter" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.159350 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.168849 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-api-access-pgk5v" (OuterVolumeSpecName: "kube-api-access-pgk5v") pod "8e079db6-a9e0-464b-b99d-57887190a5b8" (UID: "8e079db6-a9e0-464b-b99d-57887190a5b8"). InnerVolumeSpecName "kube-api-access-pgk5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.168903 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-kube-api-access-rhhlr" (OuterVolumeSpecName: "kube-api-access-rhhlr") pod "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" (UID: "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3"). InnerVolumeSpecName "kube-api-access-rhhlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.168920 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/020e76da-9968-4212-a34c-c01c8f8979de-kube-api-access-kvrsm" (OuterVolumeSpecName: "kube-api-access-kvrsm") pod "020e76da-9968-4212-a34c-c01c8f8979de" (UID: "020e76da-9968-4212-a34c-c01c8f8979de"). InnerVolumeSpecName "kube-api-access-kvrsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.169337 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3076b4c6-b401-48a5-8343-a34d9c979ea3-kube-api-access-zg52x" (OuterVolumeSpecName: "kube-api-access-zg52x") pod "3076b4c6-b401-48a5-8343-a34d9c979ea3" (UID: "3076b4c6-b401-48a5-8343-a34d9c979ea3"). InnerVolumeSpecName "kube-api-access-zg52x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.171837 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.171975 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.178452 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz"] Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.181435 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.200325 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-config-data\") pod \"4508389c-d1b8-4646-902e-4fbb597de2b7\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.200384 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4508389c-d1b8-4646-902e-4fbb597de2b7-logs\") pod \"4508389c-d1b8-4646-902e-4fbb597de2b7\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.200425 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-internal-tls-certs\") pod \"4508389c-d1b8-4646-902e-4fbb597de2b7\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.201895 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-scripts\") pod \"4508389c-d1b8-4646-902e-4fbb597de2b7\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.202145 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4508389c-d1b8-4646-902e-4fbb597de2b7-httpd-run\") pod \"4508389c-d1b8-4646-902e-4fbb597de2b7\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.202275 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lx92k\" (UniqueName: \"kubernetes.io/projected/4508389c-d1b8-4646-902e-4fbb597de2b7-kube-api-access-lx92k\") pod \"4508389c-d1b8-4646-902e-4fbb597de2b7\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.202493 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"4508389c-d1b8-4646-902e-4fbb597de2b7\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.202730 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-combined-ca-bundle\") pod \"4508389c-d1b8-4646-902e-4fbb597de2b7\" (UID: \"4508389c-d1b8-4646-902e-4fbb597de2b7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.202932 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4508389c-d1b8-4646-902e-4fbb597de2b7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4508389c-d1b8-4646-902e-4fbb597de2b7" (UID: "4508389c-d1b8-4646-902e-4fbb597de2b7"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.203019 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4508389c-d1b8-4646-902e-4fbb597de2b7-logs" (OuterVolumeSpecName: "logs") pod "4508389c-d1b8-4646-902e-4fbb597de2b7" (UID: "4508389c-d1b8-4646-902e-4fbb597de2b7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.204312 4822 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.205298 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvrsm\" (UniqueName: \"kubernetes.io/projected/020e76da-9968-4212-a34c-c01c8f8979de-kube-api-access-kvrsm\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.205411 4822 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.205519 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3076b4c6-b401-48a5-8343-a34d9c979ea3-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.205613 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4508389c-d1b8-4646-902e-4fbb597de2b7-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.205690 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.205804 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhhlr\" (UniqueName: \"kubernetes.io/projected/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-kube-api-access-rhhlr\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.205910 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.206047 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.206126 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgk5v\" (UniqueName: \"kubernetes.io/projected/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-api-access-pgk5v\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.206201 4822 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4508389c-d1b8-4646-902e-4fbb597de2b7-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.206273 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zg52x\" (UniqueName: 
\"kubernetes.io/projected/3076b4c6-b401-48a5-8343-a34d9c979ea3-kube-api-access-zg52x\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.212787 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-scripts" (OuterVolumeSpecName: "scripts") pod "4508389c-d1b8-4646-902e-4fbb597de2b7" (UID: "4508389c-d1b8-4646-902e-4fbb597de2b7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.214396 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.223554 4822 scope.go:117] "RemoveContainer" containerID="3fa7a9f9bdcb1379ddf677ccdeca716a03c96ecdbe0e60bfa23301dbe79a2174" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.228951 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.237281 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "4508389c-d1b8-4646-902e-4fbb597de2b7" (UID: "4508389c-d1b8-4646-902e-4fbb597de2b7"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.238076 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4508389c-d1b8-4646-902e-4fbb597de2b7-kube-api-access-lx92k" (OuterVolumeSpecName: "kube-api-access-lx92k") pod "4508389c-d1b8-4646-902e-4fbb597de2b7" (UID: "4508389c-d1b8-4646-902e-4fbb597de2b7"). InnerVolumeSpecName "kube-api-access-lx92k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.238732 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-config-data" (OuterVolumeSpecName: "config-data") pod "3076b4c6-b401-48a5-8343-a34d9c979ea3" (UID: "3076b4c6-b401-48a5-8343-a34d9c979ea3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.244146 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.250170 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinderac9b-account-delete-lmq7r" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.267747 4822 scope.go:117] "RemoveContainer" containerID="d9ad4dec5b7ced2f8a0abc30fb3b367a52102c69895a9418a23c7124dd5666eb" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.271739 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance8845-account-delete-rb8lw" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.294193 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4508389c-d1b8-4646-902e-4fbb597de2b7" (UID: "4508389c-d1b8-4646-902e-4fbb597de2b7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.304810 4822 scope.go:117] "RemoveContainer" containerID="0904f4c58b2a13e4a8f5dea770e37c7608b4c6d0e98aeca868ba95e684843122" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.307470 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-nova-metadata-tls-certs\") pod \"b725dbb9-8785-43a1-9f35-215938938f6e\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.307902 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bm4g\" (UniqueName: \"kubernetes.io/projected/46169b3f-3e1f-4601-a82e-f3ea1bdde003-kube-api-access-7bm4g\") pod \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\" (UID: \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.308105 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/baab33a8-22b8-4097-8c91-73d5f005fdf7-config-data\") pod \"baab33a8-22b8-4097-8c91-73d5f005fdf7\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.308425 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baab33a8-22b8-4097-8c91-73d5f005fdf7-combined-ca-bundle\") pod \"baab33a8-22b8-4097-8c91-73d5f005fdf7\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.308624 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b725dbb9-8785-43a1-9f35-215938938f6e-logs\") pod \"b725dbb9-8785-43a1-9f35-215938938f6e\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.308787 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzr9r\" (UniqueName: \"kubernetes.io/projected/b725dbb9-8785-43a1-9f35-215938938f6e-kube-api-access-bzr9r\") pod \"b725dbb9-8785-43a1-9f35-215938938f6e\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.308899 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/baab33a8-22b8-4097-8c91-73d5f005fdf7-memcached-tls-certs\") pod \"baab33a8-22b8-4097-8c91-73d5f005fdf7\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.309016 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46169b3f-3e1f-4601-a82e-f3ea1bdde003-combined-ca-bundle\") pod \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\" (UID: \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.309096 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46169b3f-3e1f-4601-a82e-f3ea1bdde003-config-data\") pod \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\" (UID: \"46169b3f-3e1f-4601-a82e-f3ea1bdde003\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.309221 4822 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-internal-tls-certs\") pod \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.309368 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-config-data-custom\") pod \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.309454 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-combined-ca-bundle\") pod \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.309543 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-combined-ca-bundle\") pod \"b725dbb9-8785-43a1-9f35-215938938f6e\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.309655 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-public-tls-certs\") pod \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.309768 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-config-data\") pod \"b725dbb9-8785-43a1-9f35-215938938f6e\" (UID: \"b725dbb9-8785-43a1-9f35-215938938f6e\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.309888 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7x42f\" (UniqueName: \"kubernetes.io/projected/baab33a8-22b8-4097-8c91-73d5f005fdf7-kube-api-access-7x42f\") pod \"baab33a8-22b8-4097-8c91-73d5f005fdf7\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.309977 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gv5gn\" (UniqueName: \"kubernetes.io/projected/4f5cf9f6-a48b-455b-aef3-952697eb1a09-kube-api-access-gv5gn\") pod \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.310073 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/baab33a8-22b8-4097-8c91-73d5f005fdf7-kolla-config\") pod \"baab33a8-22b8-4097-8c91-73d5f005fdf7\" (UID: \"baab33a8-22b8-4097-8c91-73d5f005fdf7\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.310156 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f5cf9f6-a48b-455b-aef3-952697eb1a09-logs\") pod \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.310258 4822 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-config-data\") pod \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\" (UID: \"4f5cf9f6-a48b-455b-aef3-952697eb1a09\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.310666 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-secret-volume\") pod \"collect-profiles-29409555-z4lqz\" (UID: \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.310798 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-config-volume\") pod \"collect-profiles-29409555-z4lqz\" (UID: \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.310943 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zmh9\" (UniqueName: \"kubernetes.io/projected/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-kube-api-access-6zmh9\") pod \"collect-profiles-29409555-z4lqz\" (UID: \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.311053 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.311114 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.311181 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.311241 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.311297 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lx92k\" (UniqueName: \"kubernetes.io/projected/4508389c-d1b8-4646-902e-4fbb597de2b7-kube-api-access-lx92k\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.308797 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/baab33a8-22b8-4097-8c91-73d5f005fdf7-config-data" (OuterVolumeSpecName: "config-data") pod "baab33a8-22b8-4097-8c91-73d5f005fdf7" (UID: "baab33a8-22b8-4097-8c91-73d5f005fdf7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.309231 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b725dbb9-8785-43a1-9f35-215938938f6e-logs" (OuterVolumeSpecName: "logs") pod "b725dbb9-8785-43a1-9f35-215938938f6e" (UID: "b725dbb9-8785-43a1-9f35-215938938f6e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.314575 4822 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.314651 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data podName:da2985c5-716e-43ad-b892-ea29d88fa639 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:08.31463115 +0000 UTC m=+1463.635438836 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data") pod "rabbitmq-server-0" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639") : configmap "rabbitmq-config-data" not found Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.315523 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/baab33a8-22b8-4097-8c91-73d5f005fdf7-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "baab33a8-22b8-4097-8c91-73d5f005fdf7" (UID: "baab33a8-22b8-4097-8c91-73d5f005fdf7"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.316815 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f5cf9f6-a48b-455b-aef3-952697eb1a09-logs" (OuterVolumeSpecName: "logs") pod "4f5cf9f6-a48b-455b-aef3-952697eb1a09" (UID: "4f5cf9f6-a48b-455b-aef3-952697eb1a09"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.331105 4822 scope.go:117] "RemoveContainer" containerID="ec6d0903bf7c036cf0d4792cee042afc8037b300b849a72d91c313222cd086a0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.356168 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-config-data" (OuterVolumeSpecName: "config-data") pod "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" (UID: "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.359475 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baab33a8-22b8-4097-8c91-73d5f005fdf7-kube-api-access-7x42f" (OuterVolumeSpecName: "kube-api-access-7x42f") pod "baab33a8-22b8-4097-8c91-73d5f005fdf7" (UID: "baab33a8-22b8-4097-8c91-73d5f005fdf7"). InnerVolumeSpecName "kube-api-access-7x42f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.359686 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b725dbb9-8785-43a1-9f35-215938938f6e-kube-api-access-bzr9r" (OuterVolumeSpecName: "kube-api-access-bzr9r") pod "b725dbb9-8785-43a1-9f35-215938938f6e" (UID: "b725dbb9-8785-43a1-9f35-215938938f6e"). InnerVolumeSpecName "kube-api-access-bzr9r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.360282 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4f5cf9f6-a48b-455b-aef3-952697eb1a09" (UID: "4f5cf9f6-a48b-455b-aef3-952697eb1a09"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.361148 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46169b3f-3e1f-4601-a82e-f3ea1bdde003-kube-api-access-7bm4g" (OuterVolumeSpecName: "kube-api-access-7bm4g") pod "46169b3f-3e1f-4601-a82e-f3ea1bdde003" (UID: "46169b3f-3e1f-4601-a82e-f3ea1bdde003"). InnerVolumeSpecName "kube-api-access-7bm4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.361880 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8e079db6-a9e0-464b-b99d-57887190a5b8" (UID: "8e079db6-a9e0-464b-b99d-57887190a5b8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.361981 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f5cf9f6-a48b-455b-aef3-952697eb1a09-kube-api-access-gv5gn" (OuterVolumeSpecName: "kube-api-access-gv5gn") pod "4f5cf9f6-a48b-455b-aef3-952697eb1a09" (UID: "4f5cf9f6-a48b-455b-aef3-952697eb1a09"). InnerVolumeSpecName "kube-api-access-gv5gn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.371221 4822 generic.go:334] "Generic (PLEG): container finished" podID="46169b3f-3e1f-4601-a82e-f3ea1bdde003" containerID="8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066" exitCode=0 Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.371286 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"46169b3f-3e1f-4601-a82e-f3ea1bdde003","Type":"ContainerDied","Data":"8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066"} Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.371311 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"46169b3f-3e1f-4601-a82e-f3ea1bdde003","Type":"ContainerDied","Data":"5022f3515428abc8ff3b3967e69b03fc5baecda0100975c5f849a1c02c12bed7"} Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.371365 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.375887 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" (UID: "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.377600 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3076b4c6-b401-48a5-8343-a34d9c979ea3","Type":"ContainerDied","Data":"a326356410cb4bde6ccc7c21c0c89785aa237faedd662b8a1d70028efd4dfba9"} Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.377711 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.382072 4822 scope.go:117] "RemoveContainer" containerID="eac2f0524072a06a976ebc4a00a7e18a6672bbea7169f996d10dfd7fb8ff0ee3" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.386529 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance8845-account-delete-rb8lw" event={"ID":"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0","Type":"ContainerDied","Data":"7a30677c2bd6d59234c989587987e01e381e20c786abdfb4c209036795866348"} Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.386556 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a30677c2bd6d59234c989587987e01e381e20c786abdfb4c209036795866348" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.386727 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance8845-account-delete-rb8lw" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.388093 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8e079db6-a9e0-464b-b99d-57887190a5b8","Type":"ContainerDied","Data":"a1f2ee5ca2f9abf0bcfd5525c4498f0a6a3574e349cecda3177d2c3bce0d4e67"} Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.388159 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.389902 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"74508f35-c5cd-4e07-8883-831d2de65f35","Type":"ContainerDied","Data":"4711cfe676f2c790f085f0e01970d48fc145022b8fd3ff571d022fde881b6a78"} Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.389975 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.392272 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b725dbb9-8785-43a1-9f35-215938938f6e","Type":"ContainerDied","Data":"3105a0c08ab94f69d3f6b5522d66449b5a15b292779f6d4bf38d75b99d86e6e1"} Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.392351 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.394191 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"baab33a8-22b8-4097-8c91-73d5f005fdf7","Type":"ContainerDied","Data":"e410087fab5cfe7c763eb2aa572de8903db0720ce743da2ef666d7bdecbe0b67"} Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.394268 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.400444 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3076b4c6-b401-48a5-8343-a34d9c979ea3" (UID: "3076b4c6-b401-48a5-8343-a34d9c979ea3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.413668 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-scripts\") pod \"74508f35-c5cd-4e07-8883-831d2de65f35\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.414385 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-combined-ca-bundle\") pod \"74508f35-c5cd-4e07-8883-831d2de65f35\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.414661 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4btb5\" (UniqueName: \"kubernetes.io/projected/33a2ee0e-4d60-46f9-9f2a-a094af634a64-kube-api-access-4btb5\") pod \"33a2ee0e-4d60-46f9-9f2a-a094af634a64\" (UID: \"33a2ee0e-4d60-46f9-9f2a-a094af634a64\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.414866 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgbdk\" (UniqueName: \"kubernetes.io/projected/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0-kube-api-access-pgbdk\") pod \"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0\" (UID: \"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.414986 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llw5p\" (UniqueName: \"kubernetes.io/projected/74508f35-c5cd-4e07-8883-831d2de65f35-kube-api-access-llw5p\") pod \"74508f35-c5cd-4e07-8883-831d2de65f35\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.415079 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/74508f35-c5cd-4e07-8883-831d2de65f35-httpd-run\") pod \"74508f35-c5cd-4e07-8883-831d2de65f35\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.415113 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74508f35-c5cd-4e07-8883-831d2de65f35-logs\") pod \"74508f35-c5cd-4e07-8883-831d2de65f35\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.415182 4822 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33a2ee0e-4d60-46f9-9f2a-a094af634a64-operator-scripts\") pod \"33a2ee0e-4d60-46f9-9f2a-a094af634a64\" (UID: \"33a2ee0e-4d60-46f9-9f2a-a094af634a64\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.415235 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0-operator-scripts\") pod \"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0\" (UID: \"11e4a2b6-5901-43b2-ab37-ab16b0ac03b0\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.415377 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-config-data\") pod \"74508f35-c5cd-4e07-8883-831d2de65f35\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.415436 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-public-tls-certs\") pod \"74508f35-c5cd-4e07-8883-831d2de65f35\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.415498 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"74508f35-c5cd-4e07-8883-831d2de65f35\" (UID: \"74508f35-c5cd-4e07-8883-831d2de65f35\") " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416166 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-secret-volume\") pod \"collect-profiles-29409555-z4lqz\" (UID: \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416331 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-config-volume\") pod \"collect-profiles-29409555-z4lqz\" (UID: \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416493 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zmh9\" (UniqueName: \"kubernetes.io/projected/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-kube-api-access-6zmh9\") pod \"collect-profiles-29409555-z4lqz\" (UID: \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416615 4822 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416675 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7x42f\" (UniqueName: \"kubernetes.io/projected/baab33a8-22b8-4097-8c91-73d5f005fdf7-kube-api-access-7x42f\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416698 4822 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gv5gn\" (UniqueName: \"kubernetes.io/projected/4f5cf9f6-a48b-455b-aef3-952697eb1a09-kube-api-access-gv5gn\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416716 4822 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/baab33a8-22b8-4097-8c91-73d5f005fdf7-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416730 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f5cf9f6-a48b-455b-aef3-952697eb1a09-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416771 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416786 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bm4g\" (UniqueName: \"kubernetes.io/projected/46169b3f-3e1f-4601-a82e-f3ea1bdde003-kube-api-access-7bm4g\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416801 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/baab33a8-22b8-4097-8c91-73d5f005fdf7-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416815 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b725dbb9-8785-43a1-9f35-215938938f6e-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416855 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416870 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzr9r\" (UniqueName: \"kubernetes.io/projected/b725dbb9-8785-43a1-9f35-215938938f6e-kube-api-access-bzr9r\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416885 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416898 4822 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.416964 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74508f35-c5cd-4e07-8883-831d2de65f35-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "74508f35-c5cd-4e07-8883-831d2de65f35" (UID: "74508f35-c5cd-4e07-8883-831d2de65f35"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.417816 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33a2ee0e-4d60-46f9-9f2a-a094af634a64-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "33a2ee0e-4d60-46f9-9f2a-a094af634a64" (UID: "33a2ee0e-4d60-46f9-9f2a-a094af634a64"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.418478 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74508f35-c5cd-4e07-8883-831d2de65f35-logs" (OuterVolumeSpecName: "logs") pod "74508f35-c5cd-4e07-8883-831d2de65f35" (UID: "74508f35-c5cd-4e07-8883-831d2de65f35"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.418624 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-config-volume\") pod \"collect-profiles-29409555-z4lqz\" (UID: \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.418655 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "11e4a2b6-5901-43b2-ab37-ab16b0ac03b0" (UID: "11e4a2b6-5901-43b2-ab37-ab16b0ac03b0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.419854 4822 scope.go:117] "RemoveContainer" containerID="d67bae7c70f17ec833b424c5adfa870275d1a2d4526a3192bf63ea5af23f096a" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.423109 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-scripts" (OuterVolumeSpecName: "scripts") pod "74508f35-c5cd-4e07-8883-831d2de65f35" (UID: "74508f35-c5cd-4e07-8883-831d2de65f35"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.423255 4822 generic.go:334] "Generic (PLEG): container finished" podID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerID="ee62047f058f3d3b037c7b5c164a2486cf6fd7832ad234039229326d5da5cb6e" exitCode=0 Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.423307 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f82bf765-7694-4ad6-8680-258c9e96cde0","Type":"ContainerDied","Data":"ee62047f058f3d3b037c7b5c164a2486cf6fd7832ad234039229326d5da5cb6e"} Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.423328 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0-kube-api-access-pgbdk" (OuterVolumeSpecName: "kube-api-access-pgbdk") pod "11e4a2b6-5901-43b2-ab37-ab16b0ac03b0" (UID: "11e4a2b6-5901-43b2-ab37-ab16b0ac03b0"). InnerVolumeSpecName "kube-api-access-pgbdk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.426033 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-secret-volume\") pod \"collect-profiles-29409555-z4lqz\" (UID: \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.426917 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.428435 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "74508f35-c5cd-4e07-8883-831d2de65f35" (UID: "74508f35-c5cd-4e07-8883-831d2de65f35"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.429028 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.429361 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.429186 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.429584 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4508389c-d1b8-4646-902e-4fbb597de2b7","Type":"ContainerDied","Data":"a015b79c865e58f510b41e6d96d06aea686c1ce7cd540abaa49dfbc4e0988fda"} Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.430142 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.430415 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.430446 4822 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.431274 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.431297 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovs-vswitchd" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.432329 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderac9b-account-delete-lmq7r" event={"ID":"33a2ee0e-4d60-46f9-9f2a-a094af634a64","Type":"ContainerDied","Data":"d88670111ab794874def4938e40068ce98af57a7651c75d59f3a6419788471df"} Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.432389 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d88670111ab794874def4938e40068ce98af57a7651c75d59f3a6419788471df" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 
07:15:00.432460 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinderac9b-account-delete-lmq7r" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.439078 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement75d6-account-delete-46gbh" podUID="a794a063-7f9d-4a0f-9cf7-ae70b70769eb" containerName="mariadb-account-delete" containerID="cri-o://2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20" gracePeriod=30 Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.439217 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.439728 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron40f1-account-delete-rb6h6" podUID="b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3" containerName="mariadb-account-delete" containerID="cri-o://f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d" gracePeriod=30 Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.439722 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.439865 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6bdbb4cd8b-5ttmd" event={"ID":"4f5cf9f6-a48b-455b-aef3-952697eb1a09","Type":"ContainerDied","Data":"4a89ea1e3c447a88bb5d40e8e613938ea48ad7d41aaeb9ec0c67b3af6a90a9f9"} Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.440599 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-55d788fdd6-vxlcs" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.441361 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novaapi33d4-account-delete-5vtpn" podUID="b1312168-fba0-46d6-8ca3-346303262924" containerName="mariadb-account-delete" containerID="cri-o://a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a" gracePeriod=30 Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.449141 4822 scope.go:117] "RemoveContainer" containerID="7141addb9b9ef0268fb154fd7e37bf5e2bf29947673a762e71450248e473f5d6" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.450972 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74508f35-c5cd-4e07-8883-831d2de65f35-kube-api-access-llw5p" (OuterVolumeSpecName: "kube-api-access-llw5p") pod "74508f35-c5cd-4e07-8883-831d2de65f35" (UID: "74508f35-c5cd-4e07-8883-831d2de65f35"). InnerVolumeSpecName "kube-api-access-llw5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.451036 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33a2ee0e-4d60-46f9-9f2a-a094af634a64-kube-api-access-4btb5" (OuterVolumeSpecName: "kube-api-access-4btb5") pod "33a2ee0e-4d60-46f9-9f2a-a094af634a64" (UID: "33a2ee0e-4d60-46f9-9f2a-a094af634a64"). InnerVolumeSpecName "kube-api-access-4btb5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.461746 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" (UID: "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.462746 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zmh9\" (UniqueName: \"kubernetes.io/projected/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-kube-api-access-6zmh9\") pod \"collect-profiles-29409555-z4lqz\" (UID: \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.464128 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.465697 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.470823 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.470877 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="2ea36c73-7cc2-4da4-a6f9-14d0af7c7210" containerName="nova-scheduler-scheduler" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.484214 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3076b4c6-b401-48a5-8343-a34d9c979ea3" (UID: "3076b4c6-b401-48a5-8343-a34d9c979ea3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.485305 4822 scope.go:117] "RemoveContainer" containerID="8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.487747 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "8e079db6-a9e0-464b-b99d-57887190a5b8" (UID: "8e079db6-a9e0-464b-b99d-57887190a5b8"). InnerVolumeSpecName "kube-state-metrics-tls-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.499220 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" (UID: "22f94321-d0ce-48f5-82fa-a0b60b5b1dd3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.521987 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33a2ee0e-4d60-46f9-9f2a-a094af634a64-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.522020 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.522042 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.522051 4822 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.522060 4822 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.522070 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.522078 4822 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.522088 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4btb5\" (UniqueName: \"kubernetes.io/projected/33a2ee0e-4d60-46f9-9f2a-a094af634a64-kube-api-access-4btb5\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.522097 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgbdk\" (UniqueName: \"kubernetes.io/projected/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0-kube-api-access-pgbdk\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.522106 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.522114 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llw5p\" (UniqueName: \"kubernetes.io/projected/74508f35-c5cd-4e07-8883-831d2de65f35-kube-api-access-llw5p\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 
07:15:00.522122 4822 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/74508f35-c5cd-4e07-8883-831d2de65f35-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.522131 4822 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74508f35-c5cd-4e07-8883-831d2de65f35-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.533260 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baab33a8-22b8-4097-8c91-73d5f005fdf7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "baab33a8-22b8-4097-8c91-73d5f005fdf7" (UID: "baab33a8-22b8-4097-8c91-73d5f005fdf7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.554726 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-config-data" (OuterVolumeSpecName: "config-data") pod "020e76da-9968-4212-a34c-c01c8f8979de" (UID: "020e76da-9968-4212-a34c-c01c8f8979de"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.563078 4822 scope.go:117] "RemoveContainer" containerID="8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.563601 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066\": container with ID starting with 8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066 not found: ID does not exist" containerID="8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.563635 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066"} err="failed to get container status \"8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066\": rpc error: code = NotFound desc = could not find container \"8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066\": container with ID starting with 8d6073faa0b0763eb4b8f62410ec96e846a7f4ba8749bbc03e0b2f2c3644c066 not found: ID does not exist" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.563654 4822 scope.go:117] "RemoveContainer" containerID="2737b375be318a62945c13daa6a442680918aa2addf2fb31b4a1be4501d00b4c" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.584098 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.584552 4822 scope.go:117] "RemoveContainer" containerID="99c5094f6036c81b6e4b694863a462ef98197e4530f7c0d2c8e4bc0a7902f727" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.611105 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "020e76da-9968-4212-a34c-c01c8f8979de" (UID: "020e76da-9968-4212-a34c-c01c8f8979de"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.611777 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46169b3f-3e1f-4601-a82e-f3ea1bdde003-config-data" (OuterVolumeSpecName: "config-data") pod "46169b3f-3e1f-4601-a82e-f3ea1bdde003" (UID: "46169b3f-3e1f-4601-a82e-f3ea1bdde003"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.611812 4822 scope.go:117] "RemoveContainer" containerID="52b80d1f9eb289b2321d023ee74702200d73e7e0bcf824e5f0e71860cdbd014f" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.623743 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.623774 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baab33a8-22b8-4097-8c91-73d5f005fdf7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.623787 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46169b3f-3e1f-4601-a82e-f3ea1bdde003-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.623799 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.633452 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3076b4c6-b401-48a5-8343-a34d9c979ea3" (UID: "3076b4c6-b401-48a5-8343-a34d9c979ea3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.636516 4822 scope.go:117] "RemoveContainer" containerID="70b0818800c2616bc33b123c3a4baa1c0af71f6cf5e88867c893507114ae9b3c" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.661000 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.682091 4822 scope.go:117] "RemoveContainer" containerID="8c7dc884cc27a11dfe3c85bef88b9e7c88b3ac50ba6cb9054583b104030b6680" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.685629 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.721770 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "8e079db6-a9e0-464b-b99d-57887190a5b8" (UID: "8e079db6-a9e0-464b-b99d-57887190a5b8"). InnerVolumeSpecName "kube-state-metrics-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.726590 4822 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e079db6-a9e0-464b-b99d-57887190a5b8-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.726622 4822 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3076b4c6-b401-48a5-8343-a34d9c979ea3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.726634 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.726671 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.726736 4822 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.726899 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data podName:a1229c08-35a5-4f16-8334-f32bb9b852b6 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:08.726881216 +0000 UTC m=+1464.047688902 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data") pod "rabbitmq-cell1-server-0" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6") : configmap "rabbitmq-cell1-config-data" not found Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.740011 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-q78vl" podUID="add830fb-5a2f-4cc2-8998-32ca893263db" containerName="ovn-controller" probeResult="failure" output="command timed out" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.752401 4822 scope.go:117] "RemoveContainer" containerID="53ee8c277f9abfc79fafeffabdd86dd7daf8f3652232dfbf0acccc1cc46201b6" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.798241 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "020e76da-9968-4212-a34c-c01c8f8979de" (UID: "020e76da-9968-4212-a34c-c01c8f8979de"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.798727 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinderac9b-account-delete-lmq7r"] Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.830309 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46169b3f-3e1f-4601-a82e-f3ea1bdde003-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46169b3f-3e1f-4601-a82e-f3ea1bdde003" (UID: "46169b3f-3e1f-4601-a82e-f3ea1bdde003"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.830455 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "74508f35-c5cd-4e07-8883-831d2de65f35" (UID: "74508f35-c5cd-4e07-8883-831d2de65f35"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.832680 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4f5cf9f6-a48b-455b-aef3-952697eb1a09" (UID: "4f5cf9f6-a48b-455b-aef3-952697eb1a09"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.852660 4822 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.852689 4822 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.852700 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46169b3f-3e1f-4601-a82e-f3ea1bdde003-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.852712 4822 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.852778 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:00 crc kubenswrapper[4822]: E1201 07:15:00.852834 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts podName:a794a063-7f9d-4a0f-9cf7-ae70b70769eb nodeName:}" failed. No retries permitted until 2025-12-01 07:15:02.852820784 +0000 UTC m=+1458.173628460 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts") pod "placement75d6-account-delete-46gbh" (UID: "a794a063-7f9d-4a0f-9cf7-ae70b70769eb") : configmap "openstack-scripts" not found Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.862656 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinderac9b-account-delete-lmq7r"] Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.867069 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-q78vl" podUID="add830fb-5a2f-4cc2-8998-32ca893263db" containerName="ovn-controller" probeResult="failure" output=< Dec 01 07:15:00 crc kubenswrapper[4822]: ERROR - Failed to get connection status from ovn-controller, ovn-appctl exit status: 0 Dec 01 07:15:00 crc kubenswrapper[4822]: > Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.877348 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-config-data" (OuterVolumeSpecName: "config-data") pod "4508389c-d1b8-4646-902e-4fbb597de2b7" (UID: "4508389c-d1b8-4646-902e-4fbb597de2b7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.929800 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b725dbb9-8785-43a1-9f35-215938938f6e" (UID: "b725dbb9-8785-43a1-9f35-215938938f6e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.934983 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f5cf9f6-a48b-455b-aef3-952697eb1a09" (UID: "4f5cf9f6-a48b-455b-aef3-952697eb1a09"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.950089 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "74508f35-c5cd-4e07-8883-831d2de65f35" (UID: "74508f35-c5cd-4e07-8883-831d2de65f35"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.951416 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4f5cf9f6-a48b-455b-aef3-952697eb1a09" (UID: "4f5cf9f6-a48b-455b-aef3-952697eb1a09"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.951630 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-config-data" (OuterVolumeSpecName: "config-data") pod "b725dbb9-8785-43a1-9f35-215938938f6e" (UID: "b725dbb9-8785-43a1-9f35-215938938f6e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.953793 4822 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.953816 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.953826 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.953834 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.953843 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.953852 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.958870 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4508389c-d1b8-4646-902e-4fbb597de2b7" (UID: "4508389c-d1b8-4646-902e-4fbb597de2b7"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.991273 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baab33a8-22b8-4097-8c91-73d5f005fdf7-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "baab33a8-22b8-4097-8c91-73d5f005fdf7" (UID: "baab33a8-22b8-4097-8c91-73d5f005fdf7"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.992471 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33a2ee0e-4d60-46f9-9f2a-a094af634a64" path="/var/lib/kubelet/pods/33a2ee0e-4d60-46f9-9f2a-a094af634a64/volumes" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.993076 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50ff5a68-493b-4e62-9e88-a8f1d0b4d78a" path="/var/lib/kubelet/pods/50ff5a68-493b-4e62-9e88-a8f1d0b4d78a/volumes" Dec 01 07:15:00 crc kubenswrapper[4822]: I1201 07:15:00.994750 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2b0531f-a401-46e6-80f0-7f0023e9a0d6" path="/var/lib/kubelet/pods/e2b0531f-a401-46e6-80f0-7f0023e9a0d6/volumes" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.008688 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-config-data" (OuterVolumeSpecName: "config-data") pod "74508f35-c5cd-4e07-8883-831d2de65f35" (UID: "74508f35-c5cd-4e07-8883-831d2de65f35"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.030414 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-config-data" (OuterVolumeSpecName: "config-data") pod "4f5cf9f6-a48b-455b-aef3-952697eb1a09" (UID: "4f5cf9f6-a48b-455b-aef3-952697eb1a09"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.032830 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "020e76da-9968-4212-a34c-c01c8f8979de" (UID: "020e76da-9968-4212-a34c-c01c8f8979de"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.037244 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.202:3000/\": dial tcp 10.217.0.202:3000: connect: connection refused" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.057882 4822 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/020e76da-9968-4212-a34c-c01c8f8979de-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.057908 4822 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/baab33a8-22b8-4097-8c91-73d5f005fdf7-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.057917 4822 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4508389c-d1b8-4646-902e-4fbb597de2b7-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.057926 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f5cf9f6-a48b-455b-aef3-952697eb1a09-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.057935 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74508f35-c5cd-4e07-8883-831d2de65f35-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: E1201 07:15:01.058013 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:01 crc kubenswrapper[4822]: E1201 07:15:01.058061 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts podName:b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:03.058045396 +0000 UTC m=+1458.378853082 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts") pod "neutron40f1-account-delete-rb6h6" (UID: "b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3") : configmap "openstack-scripts" not found Dec 01 07:15:01 crc kubenswrapper[4822]: E1201 07:15:01.058375 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:01 crc kubenswrapper[4822]: E1201 07:15:01.058402 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts podName:b1312168-fba0-46d6-8ca3-346303262924 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:03.058395146 +0000 UTC m=+1458.379202832 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts") pod "novaapi33d4-account-delete-5vtpn" (UID: "b1312168-fba0-46d6-8ca3-346303262924") : configmap "openstack-scripts" not found Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.058915 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "b725dbb9-8785-43a1-9f35-215938938f6e" (UID: "b725dbb9-8785-43a1-9f35-215938938f6e"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: E1201 07:15:01.158096 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.159692 4822 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b725dbb9-8785-43a1-9f35-215938938f6e-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: E1201 07:15:01.159751 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 01 07:15:01 crc kubenswrapper[4822]: E1201 07:15:01.160845 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 01 07:15:01 crc kubenswrapper[4822]: E1201 07:15:01.160901 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="a206e77a-0c4d-49bb-b6d9-c0d18990bd54" containerName="nova-cell0-conductor-conductor" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.265901 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.291941 4822 scope.go:117] "RemoveContainer" containerID="28abb98621c9dbea0f60ad4699e037dfb98ecedc14b6f3984d6224c5cbeecd9c" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.382629 4822 scope.go:117] "RemoveContainer" containerID="9d2642fba08e82f7fab50725c4908dd7f41042b114c66469823905f1b4131ec5" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.468810 4822 generic.go:334] "Generic (PLEG): container finished" podID="a1229c08-35a5-4f16-8334-f32bb9b852b6" containerID="eb9c3b14412dab696cda88040a38b9cdac23b8d8872cc1f5f086a31ccba67ffc" exitCode=0 Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.468871 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a1229c08-35a5-4f16-8334-f32bb9b852b6","Type":"ContainerDied","Data":"eb9c3b14412dab696cda88040a38b9cdac23b8d8872cc1f5f086a31ccba67ffc"} Dec 01 07:15:01 crc kubenswrapper[4822]: W1201 07:15:01.480588 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1033bda_45cf_46f7_b21d_1d12a4a4a33a.slice/crio-6f5525c0e7d091e9715a61d26a10463a1f4efb78fa1c0511b5861c6795ccc151 WatchSource:0}: Error finding container 6f5525c0e7d091e9715a61d26a10463a1f4efb78fa1c0511b5861c6795ccc151: Status 404 returned error can't find the container with id 6f5525c0e7d091e9715a61d26a10463a1f4efb78fa1c0511b5861c6795ccc151 Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.489218 4822 generic.go:334] "Generic (PLEG): container finished" podID="da2985c5-716e-43ad-b892-ea29d88fa639" containerID="67b4ee72481fb3afcfc3392e80b6461b38dd56f3a4807eaae7eb4e9cb55e7a0c" exitCode=0 Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.489320 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"da2985c5-716e-43ad-b892-ea29d88fa639","Type":"ContainerDied","Data":"67b4ee72481fb3afcfc3392e80b6461b38dd56f3a4807eaae7eb4e9cb55e7a0c"} Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.489382 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"da2985c5-716e-43ad-b892-ea29d88fa639","Type":"ContainerDied","Data":"3b70674c1661bf90811cb699f003b7aa68430cfcb82f91da21dfcdebe07a7455"} Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.489397 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b70674c1661bf90811cb699f003b7aa68430cfcb82f91da21dfcdebe07a7455" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.516915 4822 generic.go:334] "Generic (PLEG): container finished" podID="ad18272e-45a9-40cd-8b46-2de8cb3a31be" containerID="bc0412ee3ba6291d6a77685a5f298599e54eb460cf9c8ccea6109363b76fe453" exitCode=0 Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.517005 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ad18272e-45a9-40cd-8b46-2de8cb3a31be","Type":"ContainerDied","Data":"bc0412ee3ba6291d6a77685a5f298599e54eb460cf9c8ccea6109363b76fe453"} Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.590269 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.612037 4822 scope.go:117] "RemoveContainer" containerID="b3c842a2cb463bfede65789670bfcdeca18f9089a409fd8da36121671752306c" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.647571 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.663973 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.674355 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.680368 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.680884 4822 scope.go:117] "RemoveContainer" containerID="0a586d29377fca2fb0c88e3caf95850b3d5091a08ee0961b98a076104df131df" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.681229 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-erlang-cookie\") pod \"da2985c5-716e-43ad-b892-ea29d88fa639\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.681274 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-confd\") pod \"da2985c5-716e-43ad-b892-ea29d88fa639\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.681358 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"da2985c5-716e-43ad-b892-ea29d88fa639\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.681379 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4mn7\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-kube-api-access-h4mn7\") pod \"da2985c5-716e-43ad-b892-ea29d88fa639\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.681410 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data\") pod \"da2985c5-716e-43ad-b892-ea29d88fa639\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.681461 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/da2985c5-716e-43ad-b892-ea29d88fa639-erlang-cookie-secret\") pod \"da2985c5-716e-43ad-b892-ea29d88fa639\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.681487 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/da2985c5-716e-43ad-b892-ea29d88fa639-pod-info\") pod \"da2985c5-716e-43ad-b892-ea29d88fa639\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 
07:15:01.682020 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "da2985c5-716e-43ad-b892-ea29d88fa639" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.682162 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-plugins-conf\") pod \"da2985c5-716e-43ad-b892-ea29d88fa639\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.682191 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-plugins\") pod \"da2985c5-716e-43ad-b892-ea29d88fa639\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.682271 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-server-conf\") pod \"da2985c5-716e-43ad-b892-ea29d88fa639\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.682298 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-tls\") pod \"da2985c5-716e-43ad-b892-ea29d88fa639\" (UID: \"da2985c5-716e-43ad-b892-ea29d88fa639\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.682664 4822 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.683005 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "da2985c5-716e-43ad-b892-ea29d88fa639" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.683333 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "da2985c5-716e-43ad-b892-ea29d88fa639" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.690238 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.692601 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da2985c5-716e-43ad-b892-ea29d88fa639-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "da2985c5-716e-43ad-b892-ea29d88fa639" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.694988 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.697747 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-kube-api-access-h4mn7" (OuterVolumeSpecName: "kube-api-access-h4mn7") pod "da2985c5-716e-43ad-b892-ea29d88fa639" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639"). InnerVolumeSpecName "kube-api-access-h4mn7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.697934 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "da2985c5-716e-43ad-b892-ea29d88fa639" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.699783 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "da2985c5-716e-43ad-b892-ea29d88fa639" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.700911 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-55d788fdd6-vxlcs"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.707594 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-55d788fdd6-vxlcs"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.713023 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.719061 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.719802 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/da2985c5-716e-43ad-b892-ea29d88fa639-pod-info" (OuterVolumeSpecName: "pod-info") pod "da2985c5-716e-43ad-b892-ea29d88fa639" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.731341 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6bdbb4cd8b-5ttmd"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.746697 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6bdbb4cd8b-5ttmd"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.766929 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.784906 4822 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.785204 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.785214 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4mn7\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-kube-api-access-h4mn7\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.785223 4822 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/da2985c5-716e-43ad-b892-ea29d88fa639-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.785232 4822 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/da2985c5-716e-43ad-b892-ea29d88fa639-pod-info\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.785240 4822 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.785248 4822 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: E1201 07:15:01.789374 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:01 crc kubenswrapper[4822]: E1201 07:15:01.789424 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts podName:2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf nodeName:}" failed. No retries permitted until 2025-12-01 07:15:05.78940951 +0000 UTC m=+1461.110217196 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts") pod "barbican6e4d-account-delete-bz5tp" (UID: "2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf") : configmap "openstack-scripts" not found Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.797913 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.836529 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.860303 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.866009 4822 scope.go:117] "RemoveContainer" containerID="3e945f46d94da2037046bdb5e78c0bab8aeb085af05bb39debf197f614a96054" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.876324 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.881162 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 01 07:15:01 crc kubenswrapper[4822]: E1201 07:15:01.881628 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb725dbb9_8785_43a1_9f35_215938938f6e.slice/crio-3105a0c08ab94f69d3f6b5522d66449b5a15b292779f6d4bf38d75b99d86e6e1\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74508f35_c5cd_4e07_8883_831d2de65f35.slice/crio-4711cfe676f2c790f085f0e01970d48fc145022b8fd3ff571d022fde881b6a78\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb422e423_4e50_4e96_a341_d7bb5188c4af.slice/crio-65c8f8c2ad754b11428eef89af4d4e06df73f318c729e1210811528d1ba1f58d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74508f35_c5cd_4e07_8883_831d2de65f35.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb725dbb9_8785_43a1_9f35_215938938f6e.slice\": RecentStats: unable to find data in memory cache]" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.920560 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-plugins\") pod \"a1229c08-35a5-4f16-8334-f32bb9b852b6\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.920645 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a1229c08-35a5-4f16-8334-f32bb9b852b6-erlang-cookie-secret\") pod \"a1229c08-35a5-4f16-8334-f32bb9b852b6\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.921132 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod 
"a1229c08-35a5-4f16-8334-f32bb9b852b6" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.922046 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-server-conf" (OuterVolumeSpecName: "server-conf") pod "da2985c5-716e-43ad-b892-ea29d88fa639" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.922355 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-confd\") pod \"a1229c08-35a5-4f16-8334-f32bb9b852b6\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.922427 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-erlang-cookie\") pod \"a1229c08-35a5-4f16-8334-f32bb9b852b6\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.922460 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-plugins-conf\") pod \"a1229c08-35a5-4f16-8334-f32bb9b852b6\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.922544 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data\") pod \"a1229c08-35a5-4f16-8334-f32bb9b852b6\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.922670 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-tls\") pod \"a1229c08-35a5-4f16-8334-f32bb9b852b6\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.922704 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"a1229c08-35a5-4f16-8334-f32bb9b852b6\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.922726 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-server-conf\") pod \"a1229c08-35a5-4f16-8334-f32bb9b852b6\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.922776 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a1229c08-35a5-4f16-8334-f32bb9b852b6-pod-info\") pod \"a1229c08-35a5-4f16-8334-f32bb9b852b6\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.922843 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-79brf\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-kube-api-access-79brf\") pod \"a1229c08-35a5-4f16-8334-f32bb9b852b6\" (UID: \"a1229c08-35a5-4f16-8334-f32bb9b852b6\") " Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.923220 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "a1229c08-35a5-4f16-8334-f32bb9b852b6" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.923947 4822 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.923980 4822 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-server-conf\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.923992 4822 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.924004 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.924455 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "a1229c08-35a5-4f16-8334-f32bb9b852b6" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.927426 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.936143 4822 scope.go:117] "RemoveContainer" containerID="b2656b9a99cb8b9b157e9e99731e4fa691f7956b8db22c14b097df4e6e5d524e" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.937497 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.957259 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.966666 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.980028 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-kube-api-access-79brf" (OuterVolumeSpecName: "kube-api-access-79brf") pod "a1229c08-35a5-4f16-8334-f32bb9b852b6" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6"). InnerVolumeSpecName "kube-api-access-79brf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.980135 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data" (OuterVolumeSpecName: "config-data") pod "da2985c5-716e-43ad-b892-ea29d88fa639" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.981496 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "persistence") pod "a1229c08-35a5-4f16-8334-f32bb9b852b6" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.991537 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/a1229c08-35a5-4f16-8334-f32bb9b852b6-pod-info" (OuterVolumeSpecName: "pod-info") pod "a1229c08-35a5-4f16-8334-f32bb9b852b6" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.994439 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1229c08-35a5-4f16-8334-f32bb9b852b6-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "a1229c08-35a5-4f16-8334-f32bb9b852b6" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:01 crc kubenswrapper[4822]: I1201 07:15:01.994584 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "a1229c08-35a5-4f16-8334-f32bb9b852b6" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.019954 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "da2985c5-716e-43ad-b892-ea29d88fa639" (UID: "da2985c5-716e-43ad-b892-ea29d88fa639"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.026712 4822 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a1229c08-35a5-4f16-8334-f32bb9b852b6-pod-info\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.026742 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79brf\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-kube-api-access-79brf\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.026754 4822 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a1229c08-35a5-4f16-8334-f32bb9b852b6-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.026764 4822 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/da2985c5-716e-43ad-b892-ea29d88fa639-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.026773 4822 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.026782 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/da2985c5-716e-43ad-b892-ea29d88fa639-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.026790 4822 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.026807 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.067265 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-server-conf" (OuterVolumeSpecName: "server-conf") pod "a1229c08-35a5-4f16-8334-f32bb9b852b6" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.075357 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.086954 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data" (OuterVolumeSpecName: "config-data") pod "a1229c08-35a5-4f16-8334-f32bb9b852b6" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.130206 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.130242 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.130251 4822 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a1229c08-35a5-4f16-8334-f32bb9b852b6-server-conf\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.142473 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "a1229c08-35a5-4f16-8334-f32bb9b852b6" (UID: "a1229c08-35a5-4f16-8334-f32bb9b852b6"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.179047 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.182575 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_fd2326e5-f8a2-47ca-8519-576caa1825c5/ovn-northd/0.log" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.182618 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235602 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad18272e-45a9-40cd-8b46-2de8cb3a31be-combined-ca-bundle\") pod \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235654 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-metrics-certs-tls-certs\") pod \"fd2326e5-f8a2-47ca-8519-576caa1825c5\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235691 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-combined-ca-bundle\") pod \"fd2326e5-f8a2-47ca-8519-576caa1825c5\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235735 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235778 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-kolla-config\") pod \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\" (UID: 
\"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235803 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-ovn-northd-tls-certs\") pod \"fd2326e5-f8a2-47ca-8519-576caa1825c5\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235824 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-config-data-default\") pod \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235856 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtkc2\" (UniqueName: \"kubernetes.io/projected/fd2326e5-f8a2-47ca-8519-576caa1825c5-kube-api-access-dtkc2\") pod \"fd2326e5-f8a2-47ca-8519-576caa1825c5\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235874 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ad18272e-45a9-40cd-8b46-2de8cb3a31be-config-data-generated\") pod \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235896 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd2326e5-f8a2-47ca-8519-576caa1825c5-scripts\") pod \"fd2326e5-f8a2-47ca-8519-576caa1825c5\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235916 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvcxd\" (UniqueName: \"kubernetes.io/projected/ad18272e-45a9-40cd-8b46-2de8cb3a31be-kube-api-access-cvcxd\") pod \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235959 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad18272e-45a9-40cd-8b46-2de8cb3a31be-galera-tls-certs\") pod \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.235988 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fd2326e5-f8a2-47ca-8519-576caa1825c5-ovn-rundir\") pod \"fd2326e5-f8a2-47ca-8519-576caa1825c5\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.236025 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-operator-scripts\") pod \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\" (UID: \"ad18272e-45a9-40cd-8b46-2de8cb3a31be\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.236081 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd2326e5-f8a2-47ca-8519-576caa1825c5-config\") pod 
\"fd2326e5-f8a2-47ca-8519-576caa1825c5\" (UID: \"fd2326e5-f8a2-47ca-8519-576caa1825c5\") " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.236606 4822 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a1229c08-35a5-4f16-8334-f32bb9b852b6-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.237427 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd2326e5-f8a2-47ca-8519-576caa1825c5-config" (OuterVolumeSpecName: "config") pod "fd2326e5-f8a2-47ca-8519-576caa1825c5" (UID: "fd2326e5-f8a2-47ca-8519-576caa1825c5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.239237 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad18272e-45a9-40cd-8b46-2de8cb3a31be-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "ad18272e-45a9-40cd-8b46-2de8cb3a31be" (UID: "ad18272e-45a9-40cd-8b46-2de8cb3a31be"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.240304 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd2326e5-f8a2-47ca-8519-576caa1825c5-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "fd2326e5-f8a2-47ca-8519-576caa1825c5" (UID: "fd2326e5-f8a2-47ca-8519-576caa1825c5"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.240568 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ad18272e-45a9-40cd-8b46-2de8cb3a31be" (UID: "ad18272e-45a9-40cd-8b46-2de8cb3a31be"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.241230 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd2326e5-f8a2-47ca-8519-576caa1825c5-scripts" (OuterVolumeSpecName: "scripts") pod "fd2326e5-f8a2-47ca-8519-576caa1825c5" (UID: "fd2326e5-f8a2-47ca-8519-576caa1825c5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.241353 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad18272e-45a9-40cd-8b46-2de8cb3a31be-kube-api-access-cvcxd" (OuterVolumeSpecName: "kube-api-access-cvcxd") pod "ad18272e-45a9-40cd-8b46-2de8cb3a31be" (UID: "ad18272e-45a9-40cd-8b46-2de8cb3a31be"). InnerVolumeSpecName "kube-api-access-cvcxd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.244943 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "ad18272e-45a9-40cd-8b46-2de8cb3a31be" (UID: "ad18272e-45a9-40cd-8b46-2de8cb3a31be"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.245177 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "ad18272e-45a9-40cd-8b46-2de8cb3a31be" (UID: "ad18272e-45a9-40cd-8b46-2de8cb3a31be"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.257066 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "mysql-db") pod "ad18272e-45a9-40cd-8b46-2de8cb3a31be" (UID: "ad18272e-45a9-40cd-8b46-2de8cb3a31be"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.261055 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd2326e5-f8a2-47ca-8519-576caa1825c5-kube-api-access-dtkc2" (OuterVolumeSpecName: "kube-api-access-dtkc2") pod "fd2326e5-f8a2-47ca-8519-576caa1825c5" (UID: "fd2326e5-f8a2-47ca-8519-576caa1825c5"). InnerVolumeSpecName "kube-api-access-dtkc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.262821 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad18272e-45a9-40cd-8b46-2de8cb3a31be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad18272e-45a9-40cd-8b46-2de8cb3a31be" (UID: "ad18272e-45a9-40cd-8b46-2de8cb3a31be"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.290105 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd2326e5-f8a2-47ca-8519-576caa1825c5" (UID: "fd2326e5-f8a2-47ca-8519-576caa1825c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.296976 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad18272e-45a9-40cd-8b46-2de8cb3a31be-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "ad18272e-45a9-40cd-8b46-2de8cb3a31be" (UID: "ad18272e-45a9-40cd-8b46-2de8cb3a31be"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.310099 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "fd2326e5-f8a2-47ca-8519-576caa1825c5" (UID: "fd2326e5-f8a2-47ca-8519-576caa1825c5"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.331270 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "fd2326e5-f8a2-47ca-8519-576caa1825c5" (UID: "fd2326e5-f8a2-47ca-8519-576caa1825c5"). 
InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337060 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd2326e5-f8a2-47ca-8519-576caa1825c5-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337086 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad18272e-45a9-40cd-8b46-2de8cb3a31be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337097 4822 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337110 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337138 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337147 4822 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337155 4822 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd2326e5-f8a2-47ca-8519-576caa1825c5-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337165 4822 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337182 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtkc2\" (UniqueName: \"kubernetes.io/projected/fd2326e5-f8a2-47ca-8519-576caa1825c5-kube-api-access-dtkc2\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337190 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvcxd\" (UniqueName: \"kubernetes.io/projected/ad18272e-45a9-40cd-8b46-2de8cb3a31be-kube-api-access-cvcxd\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337198 4822 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ad18272e-45a9-40cd-8b46-2de8cb3a31be-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337207 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd2326e5-f8a2-47ca-8519-576caa1825c5-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337217 4822 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ad18272e-45a9-40cd-8b46-2de8cb3a31be-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337225 4822 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fd2326e5-f8a2-47ca-8519-576caa1825c5-ovn-rundir\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.337235 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad18272e-45a9-40cd-8b46-2de8cb3a31be-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.354209 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.440768 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.556374 4822 generic.go:334] "Generic (PLEG): container finished" podID="a1033bda-45cf-46f7-b21d-1d12a4a4a33a" containerID="60a6a784e18bcd7c87b9230c59ddc67e16647a6e9721f0b41d5b853a838fa70a" exitCode=0 Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.556461 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" event={"ID":"a1033bda-45cf-46f7-b21d-1d12a4a4a33a","Type":"ContainerDied","Data":"60a6a784e18bcd7c87b9230c59ddc67e16647a6e9721f0b41d5b853a838fa70a"} Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.556496 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" event={"ID":"a1033bda-45cf-46f7-b21d-1d12a4a4a33a","Type":"ContainerStarted","Data":"6f5525c0e7d091e9715a61d26a10463a1f4efb78fa1c0511b5861c6795ccc151"} Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.565650 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ad18272e-45a9-40cd-8b46-2de8cb3a31be","Type":"ContainerDied","Data":"6cb795531bfa3ea33e39225f945520d108064825ef5e9a8424025b33ce664262"} Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.565702 4822 scope.go:117] "RemoveContainer" containerID="bc0412ee3ba6291d6a77685a5f298599e54eb460cf9c8ccea6109363b76fe453" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.565810 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.617409 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-hbjw6"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.629780 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a1229c08-35a5-4f16-8334-f32bb9b852b6","Type":"ContainerDied","Data":"052345f8cc8e328db9fdb06a5022ab676f27f53deca15d085b31e977883310d7"} Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.629907 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.634930 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-hbjw6"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.640931 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_fd2326e5-f8a2-47ca-8519-576caa1825c5/ovn-northd/0.log" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.640965 4822 generic.go:334] "Generic (PLEG): container finished" podID="fd2326e5-f8a2-47ca-8519-576caa1825c5" containerID="131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038" exitCode=139 Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.641031 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fd2326e5-f8a2-47ca-8519-576caa1825c5","Type":"ContainerDied","Data":"131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038"} Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.641054 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fd2326e5-f8a2-47ca-8519-576caa1825c5","Type":"ContainerDied","Data":"eaddab33352b9ab1ec42dbd68af1d6e936784e8dd4b6d4ad6e2bd967e94a5fd3"} Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.641061 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.646671 4822 generic.go:334] "Generic (PLEG): container finished" podID="b422e423-4e50-4e96-a341-d7bb5188c4af" containerID="65c8f8c2ad754b11428eef89af4d4e06df73f318c729e1210811528d1ba1f58d" exitCode=0 Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.646726 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5875588964-pg9h2" event={"ID":"b422e423-4e50-4e96-a341-d7bb5188c4af","Type":"ContainerDied","Data":"65c8f8c2ad754b11428eef89af4d4e06df73f318c729e1210811528d1ba1f58d"} Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.650869 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.660906 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-8845-account-create-update-rjq78"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.668017 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance8845-account-delete-rb8lw"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.679682 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-8845-account-create-update-rjq78"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.680988 4822 scope.go:117] "RemoveContainer" containerID="47b85be0256b3129ddbeda68dea127dd2fa6e5d1670aef3677449c16013f88ce" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.687484 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance8845-account-delete-rb8lw"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.699653 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.715876 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.726285 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.750744 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.757176 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.763564 4822 scope.go:117] "RemoveContainer" containerID="eb9c3b14412dab696cda88040a38b9cdac23b8d8872cc1f5f086a31ccba67ffc" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.772809 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.802109 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.820793 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.889516 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 07:15:02 crc kubenswrapper[4822]: E1201 07:15:02.890531 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:02 crc kubenswrapper[4822]: E1201 07:15:02.890600 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts podName:a794a063-7f9d-4a0f-9cf7-ae70b70769eb nodeName:}" failed. No retries permitted until 2025-12-01 07:15:06.890567755 +0000 UTC m=+1462.211375441 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts") pod "placement75d6-account-delete-46gbh" (UID: "a794a063-7f9d-4a0f-9cf7-ae70b70769eb") : configmap "openstack-scripts" not found Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.946981 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-6dhf2"] Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.950187 4822 scope.go:117] "RemoveContainer" containerID="3a3f116481c73dfe5a9a8a3545e38ee6bdbcf5cf5bee9252fffc9f947030fbe5" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.981630 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="020e76da-9968-4212-a34c-c01c8f8979de" path="/var/lib/kubelet/pods/020e76da-9968-4212-a34c-c01c8f8979de/volumes" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.982184 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11e4a2b6-5901-43b2-ab37-ab16b0ac03b0" path="/var/lib/kubelet/pods/11e4a2b6-5901-43b2-ab37-ab16b0ac03b0/volumes" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.982813 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" path="/var/lib/kubelet/pods/22f94321-d0ce-48f5-82fa-a0b60b5b1dd3/volumes" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.984099 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3076b4c6-b401-48a5-8343-a34d9c979ea3" path="/var/lib/kubelet/pods/3076b4c6-b401-48a5-8343-a34d9c979ea3/volumes" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.984740 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4508389c-d1b8-4646-902e-4fbb597de2b7" path="/var/lib/kubelet/pods/4508389c-d1b8-4646-902e-4fbb597de2b7/volumes" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.985277 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46169b3f-3e1f-4601-a82e-f3ea1bdde003" path="/var/lib/kubelet/pods/46169b3f-3e1f-4601-a82e-f3ea1bdde003/volumes" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.986534 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f5cf9f6-a48b-455b-aef3-952697eb1a09" path="/var/lib/kubelet/pods/4f5cf9f6-a48b-455b-aef3-952697eb1a09/volumes" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.990375 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74508f35-c5cd-4e07-8883-831d2de65f35" path="/var/lib/kubelet/pods/74508f35-c5cd-4e07-8883-831d2de65f35/volumes" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.992288 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e079db6-a9e0-464b-b99d-57887190a5b8" path="/var/lib/kubelet/pods/8e079db6-a9e0-464b-b99d-57887190a5b8/volumes" Dec 01 07:15:02 crc kubenswrapper[4822]: I1201 07:15:02.993048 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1229c08-35a5-4f16-8334-f32bb9b852b6" path="/var/lib/kubelet/pods/a1229c08-35a5-4f16-8334-f32bb9b852b6/volumes" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:02.997742 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad18272e-45a9-40cd-8b46-2de8cb3a31be" path="/var/lib/kubelet/pods/ad18272e-45a9-40cd-8b46-2de8cb3a31be/volumes" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:02.998497 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" 
path="/var/lib/kubelet/pods/b725dbb9-8785-43a1-9f35-215938938f6e/volumes" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:02.999166 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-scripts\") pod \"b422e423-4e50-4e96-a341-d7bb5188c4af\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:02.999248 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-credential-keys\") pod \"b422e423-4e50-4e96-a341-d7bb5188c4af\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:02.999286 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-config-data\") pod \"b422e423-4e50-4e96-a341-d7bb5188c4af\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:02.999315 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-public-tls-certs\") pod \"b422e423-4e50-4e96-a341-d7bb5188c4af\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:02.999338 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7w8lg\" (UniqueName: \"kubernetes.io/projected/b422e423-4e50-4e96-a341-d7bb5188c4af-kube-api-access-7w8lg\") pod \"b422e423-4e50-4e96-a341-d7bb5188c4af\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.000054 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-internal-tls-certs\") pod \"b422e423-4e50-4e96-a341-d7bb5188c4af\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.000198 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-combined-ca-bundle\") pod \"b422e423-4e50-4e96-a341-d7bb5188c4af\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.014170 4822 scope.go:117] "RemoveContainer" containerID="d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.017299 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-fernet-keys\") pod \"b422e423-4e50-4e96-a341-d7bb5188c4af\" (UID: \"b422e423-4e50-4e96-a341-d7bb5188c4af\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.018157 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baab33a8-22b8-4097-8c91-73d5f005fdf7" path="/var/lib/kubelet/pods/baab33a8-22b8-4097-8c91-73d5f005fdf7/volumes" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.018935 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6636d12-3187-445d-afaf-2218dd71d932" 
path="/var/lib/kubelet/pods/d6636d12-3187-445d-afaf-2218dd71d932/volumes" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.019619 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da2985c5-716e-43ad-b892-ea29d88fa639" path="/var/lib/kubelet/pods/da2985c5-716e-43ad-b892-ea29d88fa639/volumes" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.022593 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f369e477-2b99-4f5b-abb8-eb788818325f" path="/var/lib/kubelet/pods/f369e477-2b99-4f5b-abb8-eb788818325f/volumes" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.023158 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd2326e5-f8a2-47ca-8519-576caa1825c5" path="/var/lib/kubelet/pods/fd2326e5-f8a2-47ca-8519-576caa1825c5/volumes" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.053930 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b422e423-4e50-4e96-a341-d7bb5188c4af" (UID: "b422e423-4e50-4e96-a341-d7bb5188c4af"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.054024 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b422e423-4e50-4e96-a341-d7bb5188c4af" (UID: "b422e423-4e50-4e96-a341-d7bb5188c4af"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.054376 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-6dhf2"] Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.054402 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-6e4d-account-create-update-vz22v"] Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.054417 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-6e4d-account-create-update-vz22v"] Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.054427 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican6e4d-account-delete-bz5tp"] Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.054626 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican6e4d-account-delete-bz5tp" podUID="2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf" containerName="mariadb-account-delete" containerID="cri-o://f05b963fa90e51f441cfec47cbaa1a3c1e77f0b689982aacfe5510b1a4ea7520" gracePeriod=30 Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.076597 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-scripts" (OuterVolumeSpecName: "scripts") pod "b422e423-4e50-4e96-a341-d7bb5188c4af" (UID: "b422e423-4e50-4e96-a341-d7bb5188c4af"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.091511 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b422e423-4e50-4e96-a341-d7bb5188c4af-kube-api-access-7w8lg" (OuterVolumeSpecName: "kube-api-access-7w8lg") pod "b422e423-4e50-4e96-a341-d7bb5188c4af" (UID: "b422e423-4e50-4e96-a341-d7bb5188c4af"). 
InnerVolumeSpecName "kube-api-access-7w8lg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.109655 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b422e423-4e50-4e96-a341-d7bb5188c4af" (UID: "b422e423-4e50-4e96-a341-d7bb5188c4af"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.116767 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-config-data" (OuterVolumeSpecName: "config-data") pod "b422e423-4e50-4e96-a341-d7bb5188c4af" (UID: "b422e423-4e50-4e96-a341-d7bb5188c4af"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.118760 4822 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.118791 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.118800 4822 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.118810 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7w8lg\" (UniqueName: \"kubernetes.io/projected/b422e423-4e50-4e96-a341-d7bb5188c4af-kube-api-access-7w8lg\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.118822 4822 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.118832 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: E1201 07:15:03.118897 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:03 crc kubenswrapper[4822]: E1201 07:15:03.118946 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts podName:b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:07.118930259 +0000 UTC m=+1462.439737945 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts") pod "neutron40f1-account-delete-rb6h6" (UID: "b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3") : configmap "openstack-scripts" not found Dec 01 07:15:03 crc kubenswrapper[4822]: E1201 07:15:03.118975 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:03 crc kubenswrapper[4822]: E1201 07:15:03.118993 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts podName:b1312168-fba0-46d6-8ca3-346303262924 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:07.1189875 +0000 UTC m=+1462.439795186 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts") pod "novaapi33d4-account-delete-5vtpn" (UID: "b1312168-fba0-46d6-8ca3-346303262924") : configmap "openstack-scripts" not found Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.124367 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b422e423-4e50-4e96-a341-d7bb5188c4af" (UID: "b422e423-4e50-4e96-a341-d7bb5188c4af"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.144745 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b422e423-4e50-4e96-a341-d7bb5188c4af" (UID: "b422e423-4e50-4e96-a341-d7bb5188c4af"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.187725 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.193358 4822 scope.go:117] "RemoveContainer" containerID="131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.219220 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-config-data\") pod \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\" (UID: \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.219266 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-combined-ca-bundle\") pod \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\" (UID: \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.219327 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m65p5\" (UniqueName: \"kubernetes.io/projected/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-kube-api-access-m65p5\") pod \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\" (UID: \"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.219609 4822 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.219627 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b422e423-4e50-4e96-a341-d7bb5188c4af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.223179 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-kube-api-access-m65p5" (OuterVolumeSpecName: "kube-api-access-m65p5") pod "2ea36c73-7cc2-4da4-a6f9-14d0af7c7210" (UID: "2ea36c73-7cc2-4da4-a6f9-14d0af7c7210"). InnerVolumeSpecName "kube-api-access-m65p5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.223980 4822 scope.go:117] "RemoveContainer" containerID="d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5" Dec 01 07:15:03 crc kubenswrapper[4822]: E1201 07:15:03.224458 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5\": container with ID starting with d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5 not found: ID does not exist" containerID="d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.224495 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5"} err="failed to get container status \"d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5\": rpc error: code = NotFound desc = could not find container \"d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5\": container with ID starting with d3e61be1e4b99b3aa49a04b5d222a3c9dda489b7322865d33fb2e07ee50a47b5 not found: ID does not exist" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.224519 4822 scope.go:117] "RemoveContainer" containerID="131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038" Dec 01 07:15:03 crc kubenswrapper[4822]: E1201 07:15:03.224760 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038\": container with ID starting with 131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038 not found: ID does not exist" containerID="131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.224785 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038"} err="failed to get container status \"131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038\": rpc error: code = NotFound desc = could not find container \"131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038\": container with ID starting with 131f9739298541f27e1bf407be03ec8e5c55d4d482b4ea6e1de85d0d96e11038 not found: ID does not exist" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.242760 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ea36c73-7cc2-4da4-a6f9-14d0af7c7210" (UID: "2ea36c73-7cc2-4da4-a6f9-14d0af7c7210"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.252902 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-config-data" (OuterVolumeSpecName: "config-data") pod "2ea36c73-7cc2-4da4-a6f9-14d0af7c7210" (UID: "2ea36c73-7cc2-4da4-a6f9-14d0af7c7210"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.345736 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m65p5\" (UniqueName: \"kubernetes.io/projected/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-kube-api-access-m65p5\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.345769 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.345779 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.525252 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 07:15:03 crc kubenswrapper[4822]: E1201 07:15:03.591346 4822 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Dec 01 07:15:03 crc kubenswrapper[4822]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-12-01T07:14:55Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Dec 01 07:15:03 crc kubenswrapper[4822]: /etc/init.d/functions: line 589: 421 Alarm clock "$@" Dec 01 07:15:03 crc kubenswrapper[4822]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-q78vl" message=< Dec 01 07:15:03 crc kubenswrapper[4822]: Exiting ovn-controller (1) [FAILED] Dec 01 07:15:03 crc kubenswrapper[4822]: Killing ovn-controller (1) [ OK ] Dec 01 07:15:03 crc kubenswrapper[4822]: Killing ovn-controller (1) with SIGKILL [ OK ] Dec 01 07:15:03 crc kubenswrapper[4822]: 2025-12-01T07:14:55Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Dec 01 07:15:03 crc kubenswrapper[4822]: /etc/init.d/functions: line 589: 421 Alarm clock "$@" Dec 01 07:15:03 crc kubenswrapper[4822]: > Dec 01 07:15:03 crc kubenswrapper[4822]: E1201 07:15:03.591398 4822 kuberuntime_container.go:691] "PreStop hook failed" err=< Dec 01 07:15:03 crc kubenswrapper[4822]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-12-01T07:14:55Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Dec 01 07:15:03 crc kubenswrapper[4822]: /etc/init.d/functions: line 589: 421 Alarm clock "$@" Dec 01 07:15:03 crc kubenswrapper[4822]: > pod="openstack/ovn-controller-q78vl" podUID="add830fb-5a2f-4cc2-8998-32ca893263db" containerName="ovn-controller" containerID="cri-o://df2d6aa20ff2fa8c870d3a35b396ea04c4a1dde752f113ceeea95e9f64d72321" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.591432 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-q78vl" podUID="add830fb-5a2f-4cc2-8998-32ca893263db" containerName="ovn-controller" containerID="cri-o://df2d6aa20ff2fa8c870d3a35b396ea04c4a1dde752f113ceeea95e9f64d72321" gracePeriod=21 Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.652265 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5zk8\" (UniqueName: \"kubernetes.io/projected/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-kube-api-access-m5zk8\") pod \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\" (UID: 
\"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.652331 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-combined-ca-bundle\") pod \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\" (UID: \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.652469 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-config-data\") pod \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\" (UID: \"a206e77a-0c4d-49bb-b6d9-c0d18990bd54\") " Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.671757 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-kube-api-access-m5zk8" (OuterVolumeSpecName: "kube-api-access-m5zk8") pod "a206e77a-0c4d-49bb-b6d9-c0d18990bd54" (UID: "a206e77a-0c4d-49bb-b6d9-c0d18990bd54"). InnerVolumeSpecName "kube-api-access-m5zk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.684208 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a206e77a-0c4d-49bb-b6d9-c0d18990bd54" (UID: "a206e77a-0c4d-49bb-b6d9-c0d18990bd54"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.685655 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-config-data" (OuterVolumeSpecName: "config-data") pod "a206e77a-0c4d-49bb-b6d9-c0d18990bd54" (UID: "a206e77a-0c4d-49bb-b6d9-c0d18990bd54"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.695515 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-q78vl_add830fb-5a2f-4cc2-8998-32ca893263db/ovn-controller/0.log" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.695587 4822 generic.go:334] "Generic (PLEG): container finished" podID="add830fb-5a2f-4cc2-8998-32ca893263db" containerID="df2d6aa20ff2fa8c870d3a35b396ea04c4a1dde752f113ceeea95e9f64d72321" exitCode=137 Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.695636 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-q78vl" event={"ID":"add830fb-5a2f-4cc2-8998-32ca893263db","Type":"ContainerDied","Data":"df2d6aa20ff2fa8c870d3a35b396ea04c4a1dde752f113ceeea95e9f64d72321"} Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.703282 4822 generic.go:334] "Generic (PLEG): container finished" podID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerID="95d7cb1b03cca33094490f26ae44fa9b54c6204f312055eaf24990307abf0a3d" exitCode=0 Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.703445 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f82bf765-7694-4ad6-8680-258c9e96cde0","Type":"ContainerDied","Data":"95d7cb1b03cca33094490f26ae44fa9b54c6204f312055eaf24990307abf0a3d"} Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.719807 4822 generic.go:334] "Generic (PLEG): container finished" podID="2ea36c73-7cc2-4da4-a6f9-14d0af7c7210" containerID="e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5" exitCode=0 Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.719902 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210","Type":"ContainerDied","Data":"e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5"} Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.719934 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2ea36c73-7cc2-4da4-a6f9-14d0af7c7210","Type":"ContainerDied","Data":"2252be5f1e0cdc71190d795f6089aab7203918d3e5efadcdcd559b4d1ba84f22"} Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.719955 4822 scope.go:117] "RemoveContainer" containerID="e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.720125 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.747845 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5875588964-pg9h2" event={"ID":"b422e423-4e50-4e96-a341-d7bb5188c4af","Type":"ContainerDied","Data":"80b95a27b54dc53f014f388b97e214223361e9740c2ce60a1998d7d39c72c3d7"} Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.748046 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5875588964-pg9h2" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.754590 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5zk8\" (UniqueName: \"kubernetes.io/projected/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-kube-api-access-m5zk8\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.754640 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.754653 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a206e77a-0c4d-49bb-b6d9-c0d18990bd54-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.755099 4822 generic.go:334] "Generic (PLEG): container finished" podID="a206e77a-0c4d-49bb-b6d9-c0d18990bd54" containerID="aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c" exitCode=0 Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.755249 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.755331 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a206e77a-0c4d-49bb-b6d9-c0d18990bd54","Type":"ContainerDied","Data":"aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c"} Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.755379 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a206e77a-0c4d-49bb-b6d9-c0d18990bd54","Type":"ContainerDied","Data":"696ee146333e2119dcf516074a8b9d51c8302831c15bae32ab23e82b66c6dba0"} Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.771473 4822 scope.go:117] "RemoveContainer" containerID="e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5" Dec 01 07:15:03 crc kubenswrapper[4822]: E1201 07:15:03.771924 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5\": container with ID starting with e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5 not found: ID does not exist" containerID="e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.771960 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5"} err="failed to get container status \"e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5\": rpc error: code = NotFound desc = could not find container \"e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5\": container with ID starting with e81141f0de640de3350f56f7ed0a122ea457d0697aa5bd5907103a0c36480ca5 not found: ID does not exist" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.771986 4822 scope.go:117] "RemoveContainer" containerID="65c8f8c2ad754b11428eef89af4d4e06df73f318c729e1210811528d1ba1f58d" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.821428 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 
07:15:03.832958 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.840102 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-5875588964-pg9h2"] Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.843260 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-5875588964-pg9h2"] Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.856852 4822 scope.go:117] "RemoveContainer" containerID="aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.896410 4822 scope.go:117] "RemoveContainer" containerID="aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c" Dec 01 07:15:03 crc kubenswrapper[4822]: E1201 07:15:03.897048 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c\": container with ID starting with aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c not found: ID does not exist" containerID="aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.897090 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c"} err="failed to get container status \"aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c\": rpc error: code = NotFound desc = could not find container \"aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c\": container with ID starting with aade6e529a4ee1d6e23505c38fe29971bca8401e121c68ee5d191c64ebd27f2c not found: ID does not exist" Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.897137 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:15:03 crc kubenswrapper[4822]: I1201 07:15:03.902374 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.017819 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="22f94321-d0ce-48f5-82fa-a0b60b5b1dd3" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.164:8776/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.037083 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.119346 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-q78vl_add830fb-5a2f-4cc2-8998-32ca893263db/ovn-controller/0.log" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.119428 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-q78vl" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.123268 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.165519 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zmh9\" (UniqueName: \"kubernetes.io/projected/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-kube-api-access-6zmh9\") pod \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\" (UID: \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.165635 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-config-data\") pod \"f82bf765-7694-4ad6-8680-258c9e96cde0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.165748 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f82bf765-7694-4ad6-8680-258c9e96cde0-run-httpd\") pod \"f82bf765-7694-4ad6-8680-258c9e96cde0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.166066 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kp42f\" (UniqueName: \"kubernetes.io/projected/f82bf765-7694-4ad6-8680-258c9e96cde0-kube-api-access-kp42f\") pod \"f82bf765-7694-4ad6-8680-258c9e96cde0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.166148 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-ceilometer-tls-certs\") pod \"f82bf765-7694-4ad6-8680-258c9e96cde0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.166195 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-secret-volume\") pod \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\" (UID: \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.166257 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f82bf765-7694-4ad6-8680-258c9e96cde0-log-httpd\") pod \"f82bf765-7694-4ad6-8680-258c9e96cde0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.166306 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-log-ovn\") pod \"add830fb-5a2f-4cc2-8998-32ca893263db\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.166336 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-sg-core-conf-yaml\") pod \"f82bf765-7694-4ad6-8680-258c9e96cde0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.166361 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/add830fb-5a2f-4cc2-8998-32ca893263db-ovn-controller-tls-certs\") pod \"add830fb-5a2f-4cc2-8998-32ca893263db\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.166387 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/add830fb-5a2f-4cc2-8998-32ca893263db-combined-ca-bundle\") pod \"add830fb-5a2f-4cc2-8998-32ca893263db\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.166584 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f82bf765-7694-4ad6-8680-258c9e96cde0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f82bf765-7694-4ad6-8680-258c9e96cde0" (UID: "f82bf765-7694-4ad6-8680-258c9e96cde0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.167052 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f82bf765-7694-4ad6-8680-258c9e96cde0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f82bf765-7694-4ad6-8680-258c9e96cde0" (UID: "f82bf765-7694-4ad6-8680-258c9e96cde0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.167599 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-run\") pod \"add830fb-5a2f-4cc2-8998-32ca893263db\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.167628 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fvlg\" (UniqueName: \"kubernetes.io/projected/add830fb-5a2f-4cc2-8998-32ca893263db-kube-api-access-4fvlg\") pod \"add830fb-5a2f-4cc2-8998-32ca893263db\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.167668 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/add830fb-5a2f-4cc2-8998-32ca893263db-scripts\") pod \"add830fb-5a2f-4cc2-8998-32ca893263db\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.167679 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "add830fb-5a2f-4cc2-8998-32ca893263db" (UID: "add830fb-5a2f-4cc2-8998-32ca893263db"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.167698 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-scripts\") pod \"f82bf765-7694-4ad6-8680-258c9e96cde0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.167783 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-run-ovn\") pod \"add830fb-5a2f-4cc2-8998-32ca893263db\" (UID: \"add830fb-5a2f-4cc2-8998-32ca893263db\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.167812 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-config-volume\") pod \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\" (UID: \"a1033bda-45cf-46f7-b21d-1d12a4a4a33a\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.167853 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-combined-ca-bundle\") pod \"f82bf765-7694-4ad6-8680-258c9e96cde0\" (UID: \"f82bf765-7694-4ad6-8680-258c9e96cde0\") " Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.168474 4822 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f82bf765-7694-4ad6-8680-258c9e96cde0-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.168496 4822 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f82bf765-7694-4ad6-8680-258c9e96cde0-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.168506 4822 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.168878 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-run" (OuterVolumeSpecName: "var-run") pod "add830fb-5a2f-4cc2-8998-32ca893263db" (UID: "add830fb-5a2f-4cc2-8998-32ca893263db"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.168928 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "add830fb-5a2f-4cc2-8998-32ca893263db" (UID: "add830fb-5a2f-4cc2-8998-32ca893263db"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.170179 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-config-volume" (OuterVolumeSpecName: "config-volume") pod "a1033bda-45cf-46f7-b21d-1d12a4a4a33a" (UID: "a1033bda-45cf-46f7-b21d-1d12a4a4a33a"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.171273 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-kube-api-access-6zmh9" (OuterVolumeSpecName: "kube-api-access-6zmh9") pod "a1033bda-45cf-46f7-b21d-1d12a4a4a33a" (UID: "a1033bda-45cf-46f7-b21d-1d12a4a4a33a"). InnerVolumeSpecName "kube-api-access-6zmh9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.172452 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/add830fb-5a2f-4cc2-8998-32ca893263db-scripts" (OuterVolumeSpecName: "scripts") pod "add830fb-5a2f-4cc2-8998-32ca893263db" (UID: "add830fb-5a2f-4cc2-8998-32ca893263db"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.175811 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/add830fb-5a2f-4cc2-8998-32ca893263db-kube-api-access-4fvlg" (OuterVolumeSpecName: "kube-api-access-4fvlg") pod "add830fb-5a2f-4cc2-8998-32ca893263db" (UID: "add830fb-5a2f-4cc2-8998-32ca893263db"). InnerVolumeSpecName "kube-api-access-4fvlg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.177180 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a1033bda-45cf-46f7-b21d-1d12a4a4a33a" (UID: "a1033bda-45cf-46f7-b21d-1d12a4a4a33a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.177504 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f82bf765-7694-4ad6-8680-258c9e96cde0-kube-api-access-kp42f" (OuterVolumeSpecName: "kube-api-access-kp42f") pod "f82bf765-7694-4ad6-8680-258c9e96cde0" (UID: "f82bf765-7694-4ad6-8680-258c9e96cde0"). InnerVolumeSpecName "kube-api-access-kp42f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.178951 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-scripts" (OuterVolumeSpecName: "scripts") pod "f82bf765-7694-4ad6-8680-258c9e96cde0" (UID: "f82bf765-7694-4ad6-8680-258c9e96cde0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.192984 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f82bf765-7694-4ad6-8680-258c9e96cde0" (UID: "f82bf765-7694-4ad6-8680-258c9e96cde0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.207352 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/add830fb-5a2f-4cc2-8998-32ca893263db-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "add830fb-5a2f-4cc2-8998-32ca893263db" (UID: "add830fb-5a2f-4cc2-8998-32ca893263db"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.232824 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f82bf765-7694-4ad6-8680-258c9e96cde0" (UID: "f82bf765-7694-4ad6-8680-258c9e96cde0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.238809 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "f82bf765-7694-4ad6-8680-258c9e96cde0" (UID: "f82bf765-7694-4ad6-8680-258c9e96cde0"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.245421 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-config-data" (OuterVolumeSpecName: "config-data") pod "f82bf765-7694-4ad6-8680-258c9e96cde0" (UID: "f82bf765-7694-4ad6-8680-258c9e96cde0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.251612 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/add830fb-5a2f-4cc2-8998-32ca893263db-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "add830fb-5a2f-4cc2-8998-32ca893263db" (UID: "add830fb-5a2f-4cc2-8998-32ca893263db"). InnerVolumeSpecName "ovn-controller-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269669 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/add830fb-5a2f-4cc2-8998-32ca893263db-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269695 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269704 4822 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269716 4822 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269726 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269735 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zmh9\" (UniqueName: \"kubernetes.io/projected/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-kube-api-access-6zmh9\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269761 4822 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269772 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kp42f\" (UniqueName: \"kubernetes.io/projected/f82bf765-7694-4ad6-8680-258c9e96cde0-kube-api-access-kp42f\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269780 4822 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269788 4822 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a1033bda-45cf-46f7-b21d-1d12a4a4a33a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269798 4822 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f82bf765-7694-4ad6-8680-258c9e96cde0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269806 4822 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/add830fb-5a2f-4cc2-8998-32ca893263db-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269814 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/add830fb-5a2f-4cc2-8998-32ca893263db-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 
01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269822 4822 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/add830fb-5a2f-4cc2-8998-32ca893263db-var-run\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.269830 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fvlg\" (UniqueName: \"kubernetes.io/projected/add830fb-5a2f-4cc2-8998-32ca893263db-kube-api-access-4fvlg\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.768208 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.768237 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f82bf765-7694-4ad6-8680-258c9e96cde0","Type":"ContainerDied","Data":"6e476699400af4464aecc58bea0ba03ed71ad41942863dc57b72efa7230c28e3"} Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.768802 4822 scope.go:117] "RemoveContainer" containerID="ee62047f058f3d3b037c7b5c164a2486cf6fd7832ad234039229326d5da5cb6e" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.800981 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-q78vl_add830fb-5a2f-4cc2-8998-32ca893263db/ovn-controller/0.log" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.801097 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-q78vl" event={"ID":"add830fb-5a2f-4cc2-8998-32ca893263db","Type":"ContainerDied","Data":"ccc98990c366b3efc3c1761b077d7d9ba2c9ae7570eb83510ac1bfa4f323df8f"} Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.801120 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-q78vl" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.810778 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" event={"ID":"a1033bda-45cf-46f7-b21d-1d12a4a4a33a","Type":"ContainerDied","Data":"6f5525c0e7d091e9715a61d26a10463a1f4efb78fa1c0511b5861c6795ccc151"} Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.810851 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f5525c0e7d091e9715a61d26a10463a1f4efb78fa1c0511b5861c6795ccc151" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.810906 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.813184 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.819240 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.823808 4822 scope.go:117] "RemoveContainer" containerID="6f031e1b4b2e282b4cd48d5d64210b11a8231bd55a4051a1a21da63d57205bab" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.897766 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-q78vl"] Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.904222 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-q78vl"] Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.911316 4822 scope.go:117] "RemoveContainer" containerID="95d7cb1b03cca33094490f26ae44fa9b54c6204f312055eaf24990307abf0a3d" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.933897 4822 scope.go:117] "RemoveContainer" containerID="232bfa3723a8e9ea8d1812e63175355599297eaec99462bd5a8dbce62dfd5a9f" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.957488 4822 scope.go:117] "RemoveContainer" containerID="df2d6aa20ff2fa8c870d3a35b396ea04c4a1dde752f113ceeea95e9f64d72321" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.960622 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ea36c73-7cc2-4da4-a6f9-14d0af7c7210" path="/var/lib/kubelet/pods/2ea36c73-7cc2-4da4-a6f9-14d0af7c7210/volumes" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.961698 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a38c501-78bf-41cc-8caa-cf049fe4821d" path="/var/lib/kubelet/pods/4a38c501-78bf-41cc-8caa-cf049fe4821d/volumes" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.962835 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61ff107a-0e1c-4c16-b842-7ee341347c9e" path="/var/lib/kubelet/pods/61ff107a-0e1c-4c16-b842-7ee341347c9e/volumes" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.963855 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a206e77a-0c4d-49bb-b6d9-c0d18990bd54" path="/var/lib/kubelet/pods/a206e77a-0c4d-49bb-b6d9-c0d18990bd54/volumes" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.965773 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="add830fb-5a2f-4cc2-8998-32ca893263db" path="/var/lib/kubelet/pods/add830fb-5a2f-4cc2-8998-32ca893263db/volumes" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.967309 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b422e423-4e50-4e96-a341-d7bb5188c4af" path="/var/lib/kubelet/pods/b422e423-4e50-4e96-a341-d7bb5188c4af/volumes" Dec 01 07:15:04 crc kubenswrapper[4822]: I1201 07:15:04.968506 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" path="/var/lib/kubelet/pods/f82bf765-7694-4ad6-8680-258c9e96cde0/volumes" Dec 01 07:15:05 crc kubenswrapper[4822]: E1201 07:15:05.427251 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" 
containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:05 crc kubenswrapper[4822]: E1201 07:15:05.429118 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:05 crc kubenswrapper[4822]: E1201 07:15:05.429695 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:05 crc kubenswrapper[4822]: E1201 07:15:05.429933 4822 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server" Dec 01 07:15:05 crc kubenswrapper[4822]: E1201 07:15:05.429751 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:05 crc kubenswrapper[4822]: E1201 07:15:05.432734 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:05 crc kubenswrapper[4822]: E1201 07:15:05.434837 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:05 crc kubenswrapper[4822]: E1201 07:15:05.435006 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovs-vswitchd" Dec 01 07:15:05 crc kubenswrapper[4822]: E1201 07:15:05.799922 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:05 crc kubenswrapper[4822]: E1201 07:15:05.800399 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts 
podName:2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf nodeName:}" failed. No retries permitted until 2025-12-01 07:15:13.800367935 +0000 UTC m=+1469.121175661 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts") pod "barbican6e4d-account-delete-bz5tp" (UID: "2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf") : configmap "openstack-scripts" not found Dec 01 07:15:06 crc kubenswrapper[4822]: E1201 07:15:06.914383 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:06 crc kubenswrapper[4822]: E1201 07:15:06.914502 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts podName:a794a063-7f9d-4a0f-9cf7-ae70b70769eb nodeName:}" failed. No retries permitted until 2025-12-01 07:15:14.914474278 +0000 UTC m=+1470.235282004 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts") pod "placement75d6-account-delete-46gbh" (UID: "a794a063-7f9d-4a0f-9cf7-ae70b70769eb") : configmap "openstack-scripts" not found Dec 01 07:15:07 crc kubenswrapper[4822]: E1201 07:15:07.218438 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:07 crc kubenswrapper[4822]: E1201 07:15:07.218514 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts podName:b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:15.218499787 +0000 UTC m=+1470.539307473 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts") pod "neutron40f1-account-delete-rb6h6" (UID: "b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3") : configmap "openstack-scripts" not found Dec 01 07:15:07 crc kubenswrapper[4822]: E1201 07:15:07.218454 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:07 crc kubenswrapper[4822]: E1201 07:15:07.218668 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts podName:b1312168-fba0-46d6-8ca3-346303262924 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:15.218644571 +0000 UTC m=+1470.539452448 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts") pod "novaapi33d4-account-delete-5vtpn" (UID: "b1312168-fba0-46d6-8ca3-346303262924") : configmap "openstack-scripts" not found Dec 01 07:15:10 crc kubenswrapper[4822]: E1201 07:15:10.427383 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:10 crc kubenswrapper[4822]: E1201 07:15:10.428473 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:10 crc kubenswrapper[4822]: E1201 07:15:10.428883 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:10 crc kubenswrapper[4822]: E1201 07:15:10.428931 4822 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server" Dec 01 07:15:10 crc kubenswrapper[4822]: E1201 07:15:10.429986 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:10 crc kubenswrapper[4822]: E1201 07:15:10.433397 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:10 crc kubenswrapper[4822]: E1201 07:15:10.434631 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:10 crc kubenswrapper[4822]: E1201 07:15:10.434695 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: 
container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovs-vswitchd" Dec 01 07:15:12 crc kubenswrapper[4822]: I1201 07:15:12.542622 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:15:12 crc kubenswrapper[4822]: I1201 07:15:12.542724 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:15:13 crc kubenswrapper[4822]: I1201 07:15:13.303823 4822 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-54446c57b5-gw6bc" podUID="4e7558a6-6804-48af-b74d-394b7c5dd57e" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.150:9696/\": dial tcp 10.217.0.150:9696: connect: connection refused" Dec 01 07:15:13 crc kubenswrapper[4822]: E1201 07:15:13.825749 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:13 crc kubenswrapper[4822]: E1201 07:15:13.825817 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts podName:2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf nodeName:}" failed. No retries permitted until 2025-12-01 07:15:29.825802412 +0000 UTC m=+1485.146610098 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts") pod "barbican6e4d-account-delete-bz5tp" (UID: "2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf") : configmap "openstack-scripts" not found Dec 01 07:15:14 crc kubenswrapper[4822]: E1201 07:15:14.960317 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:14 crc kubenswrapper[4822]: E1201 07:15:14.960426 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts podName:a794a063-7f9d-4a0f-9cf7-ae70b70769eb nodeName:}" failed. No retries permitted until 2025-12-01 07:15:30.960405153 +0000 UTC m=+1486.281212849 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts") pod "placement75d6-account-delete-46gbh" (UID: "a794a063-7f9d-4a0f-9cf7-ae70b70769eb") : configmap "openstack-scripts" not found Dec 01 07:15:15 crc kubenswrapper[4822]: E1201 07:15:15.264695 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:15 crc kubenswrapper[4822]: E1201 07:15:15.265007 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts podName:b1312168-fba0-46d6-8ca3-346303262924 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:31.264991408 +0000 UTC m=+1486.585799084 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts") pod "novaapi33d4-account-delete-5vtpn" (UID: "b1312168-fba0-46d6-8ca3-346303262924") : configmap "openstack-scripts" not found Dec 01 07:15:15 crc kubenswrapper[4822]: E1201 07:15:15.264743 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:15 crc kubenswrapper[4822]: E1201 07:15:15.265059 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts podName:b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3 nodeName:}" failed. No retries permitted until 2025-12-01 07:15:31.26504777 +0000 UTC m=+1486.585855456 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts") pod "neutron40f1-account-delete-rb6h6" (UID: "b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3") : configmap "openstack-scripts" not found Dec 01 07:15:15 crc kubenswrapper[4822]: E1201 07:15:15.426924 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:15 crc kubenswrapper[4822]: E1201 07:15:15.427213 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:15 crc kubenswrapper[4822]: E1201 07:15:15.427450 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:15 crc kubenswrapper[4822]: E1201 07:15:15.427504 4822 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server" Dec 01 07:15:15 crc kubenswrapper[4822]: E1201 07:15:15.427790 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:15 crc kubenswrapper[4822]: E1201 07:15:15.430137 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown 
desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:15 crc kubenswrapper[4822]: E1201 07:15:15.431191 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:15 crc kubenswrapper[4822]: E1201 07:15:15.431230 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovs-vswitchd" Dec 01 07:15:16 crc kubenswrapper[4822]: I1201 07:15:16.973589 4822 generic.go:334] "Generic (PLEG): container finished" podID="4e7558a6-6804-48af-b74d-394b7c5dd57e" containerID="995ca5fdebaa312592d184b7f0d85a781106462fc236fe76b19b5c65128c9922" exitCode=0 Dec 01 07:15:16 crc kubenswrapper[4822]: I1201 07:15:16.973656 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54446c57b5-gw6bc" event={"ID":"4e7558a6-6804-48af-b74d-394b7c5dd57e","Type":"ContainerDied","Data":"995ca5fdebaa312592d184b7f0d85a781106462fc236fe76b19b5c65128c9922"} Dec 01 07:15:16 crc kubenswrapper[4822]: I1201 07:15:16.974048 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54446c57b5-gw6bc" event={"ID":"4e7558a6-6804-48af-b74d-394b7c5dd57e","Type":"ContainerDied","Data":"401ddc270177763ff4738a35dce8db7e419fab267f7c8ebd9abf6b81b9a48ddd"} Dec 01 07:15:16 crc kubenswrapper[4822]: I1201 07:15:16.974078 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="401ddc270177763ff4738a35dce8db7e419fab267f7c8ebd9abf6b81b9a48ddd" Dec 01 07:15:16 crc kubenswrapper[4822]: I1201 07:15:16.992587 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.194782 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-config\") pod \"4e7558a6-6804-48af-b74d-394b7c5dd57e\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.196074 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-ovndb-tls-certs\") pod \"4e7558a6-6804-48af-b74d-394b7c5dd57e\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.196131 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-internal-tls-certs\") pod \"4e7558a6-6804-48af-b74d-394b7c5dd57e\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.196233 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85r5r\" (UniqueName: \"kubernetes.io/projected/4e7558a6-6804-48af-b74d-394b7c5dd57e-kube-api-access-85r5r\") pod \"4e7558a6-6804-48af-b74d-394b7c5dd57e\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.196279 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-combined-ca-bundle\") pod \"4e7558a6-6804-48af-b74d-394b7c5dd57e\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.196467 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-httpd-config\") pod \"4e7558a6-6804-48af-b74d-394b7c5dd57e\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.196595 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-public-tls-certs\") pod \"4e7558a6-6804-48af-b74d-394b7c5dd57e\" (UID: \"4e7558a6-6804-48af-b74d-394b7c5dd57e\") " Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.204074 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "4e7558a6-6804-48af-b74d-394b7c5dd57e" (UID: "4e7558a6-6804-48af-b74d-394b7c5dd57e"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.215808 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e7558a6-6804-48af-b74d-394b7c5dd57e-kube-api-access-85r5r" (OuterVolumeSpecName: "kube-api-access-85r5r") pod "4e7558a6-6804-48af-b74d-394b7c5dd57e" (UID: "4e7558a6-6804-48af-b74d-394b7c5dd57e"). InnerVolumeSpecName "kube-api-access-85r5r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.250348 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4e7558a6-6804-48af-b74d-394b7c5dd57e" (UID: "4e7558a6-6804-48af-b74d-394b7c5dd57e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.256946 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-config" (OuterVolumeSpecName: "config") pod "4e7558a6-6804-48af-b74d-394b7c5dd57e" (UID: "4e7558a6-6804-48af-b74d-394b7c5dd57e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.283203 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4e7558a6-6804-48af-b74d-394b7c5dd57e" (UID: "4e7558a6-6804-48af-b74d-394b7c5dd57e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.287087 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e7558a6-6804-48af-b74d-394b7c5dd57e" (UID: "4e7558a6-6804-48af-b74d-394b7c5dd57e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.297960 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85r5r\" (UniqueName: \"kubernetes.io/projected/4e7558a6-6804-48af-b74d-394b7c5dd57e-kube-api-access-85r5r\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.297992 4822 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.298001 4822 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.298010 4822 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.298019 4822 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.298028 4822 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.300770 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "4e7558a6-6804-48af-b74d-394b7c5dd57e" (UID: "4e7558a6-6804-48af-b74d-394b7c5dd57e"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.400492 4822 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e7558a6-6804-48af-b74d-394b7c5dd57e-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:17 crc kubenswrapper[4822]: I1201 07:15:17.983687 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-54446c57b5-gw6bc" Dec 01 07:15:18 crc kubenswrapper[4822]: I1201 07:15:18.012886 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-54446c57b5-gw6bc"] Dec 01 07:15:18 crc kubenswrapper[4822]: I1201 07:15:18.020516 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-54446c57b5-gw6bc"] Dec 01 07:15:18 crc kubenswrapper[4822]: I1201 07:15:18.968541 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e7558a6-6804-48af-b74d-394b7c5dd57e" path="/var/lib/kubelet/pods/4e7558a6-6804-48af-b74d-394b7c5dd57e/volumes" Dec 01 07:15:20 crc kubenswrapper[4822]: E1201 07:15:20.427374 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:20 crc kubenswrapper[4822]: E1201 07:15:20.427990 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:20 crc kubenswrapper[4822]: E1201 07:15:20.428601 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:20 crc kubenswrapper[4822]: E1201 07:15:20.428723 4822 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server" Dec 01 07:15:20 crc kubenswrapper[4822]: E1201 07:15:20.430323 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" 
cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:20 crc kubenswrapper[4822]: E1201 07:15:20.432819 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:20 crc kubenswrapper[4822]: E1201 07:15:20.435782 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:20 crc kubenswrapper[4822]: E1201 07:15:20.435889 4822 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovs-vswitchd" Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.600711 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.737048 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.737124 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b21643f1-db8c-4613-ac29-f1d4d0970b7b-lock\") pod \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.737207 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcd8h\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-kube-api-access-tcd8h\") pod \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.737243 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift\") pod \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.737313 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b21643f1-db8c-4613-ac29-f1d4d0970b7b-cache\") pod \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\" (UID: \"b21643f1-db8c-4613-ac29-f1d4d0970b7b\") " Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.737764 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b21643f1-db8c-4613-ac29-f1d4d0970b7b-lock" (OuterVolumeSpecName: "lock") pod "b21643f1-db8c-4613-ac29-f1d4d0970b7b" (UID: "b21643f1-db8c-4613-ac29-f1d4d0970b7b"). InnerVolumeSpecName "lock". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.739277 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b21643f1-db8c-4613-ac29-f1d4d0970b7b-cache" (OuterVolumeSpecName: "cache") pod "b21643f1-db8c-4613-ac29-f1d4d0970b7b" (UID: "b21643f1-db8c-4613-ac29-f1d4d0970b7b"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.743776 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "swift") pod "b21643f1-db8c-4613-ac29-f1d4d0970b7b" (UID: "b21643f1-db8c-4613-ac29-f1d4d0970b7b"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.745083 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-kube-api-access-tcd8h" (OuterVolumeSpecName: "kube-api-access-tcd8h") pod "b21643f1-db8c-4613-ac29-f1d4d0970b7b" (UID: "b21643f1-db8c-4613-ac29-f1d4d0970b7b"). InnerVolumeSpecName "kube-api-access-tcd8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.745706 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "b21643f1-db8c-4613-ac29-f1d4d0970b7b" (UID: "b21643f1-db8c-4613-ac29-f1d4d0970b7b"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.839807 4822 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.840639 4822 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b21643f1-db8c-4613-ac29-f1d4d0970b7b-lock\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.840692 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcd8h\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-kube-api-access-tcd8h\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.840710 4822 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b21643f1-db8c-4613-ac29-f1d4d0970b7b-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.840726 4822 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b21643f1-db8c-4613-ac29-f1d4d0970b7b-cache\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.861841 4822 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 01 07:15:24 crc kubenswrapper[4822]: I1201 07:15:24.947408 4822 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:25 
crc kubenswrapper[4822]: I1201 07:15:25.067495 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.067680 4822 generic.go:334] "Generic (PLEG): container finished" podID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerID="b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed" exitCode=137 Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.067513 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed"} Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.067951 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b21643f1-db8c-4613-ac29-f1d4d0970b7b","Type":"ContainerDied","Data":"65ccad1bf4c8dff4313c68c60a071fe45b41c30286d603997e9e03dc886f8779"} Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.068006 4822 scope.go:117] "RemoveContainer" containerID="b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.100973 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.101933 4822 scope.go:117] "RemoveContainer" containerID="cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.106137 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.126383 4822 scope.go:117] "RemoveContainer" containerID="094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.157870 4822 scope.go:117] "RemoveContainer" containerID="c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.184156 4822 scope.go:117] "RemoveContainer" containerID="e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.203884 4822 scope.go:117] "RemoveContainer" containerID="13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.233920 4822 scope.go:117] "RemoveContainer" containerID="8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.255933 4822 scope.go:117] "RemoveContainer" containerID="39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.279674 4822 scope.go:117] "RemoveContainer" containerID="c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.297842 4822 scope.go:117] "RemoveContainer" containerID="0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.320275 4822 scope.go:117] "RemoveContainer" containerID="2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.336861 4822 scope.go:117] "RemoveContainer" containerID="cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.359862 4822 scope.go:117] 
"RemoveContainer" containerID="d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.386980 4822 scope.go:117] "RemoveContainer" containerID="cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.426518 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.426568 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0 is running failed: container process not found" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.427072 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0 is running failed: container process not found" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.427095 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.427913 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0 is running failed: container process not found" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.427948 4822 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovs-vswitchd" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.427953 4822 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" 
cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.427999 4822 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-5prf9" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.491770 4822 scope.go:117] "RemoveContainer" containerID="4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.516521 4822 scope.go:117] "RemoveContainer" containerID="b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.517133 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed\": container with ID starting with b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed not found: ID does not exist" containerID="b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.517179 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed"} err="failed to get container status \"b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed\": rpc error: code = NotFound desc = could not find container \"b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed\": container with ID starting with b3f5745360d6d305fc4d050f988dcb8aefcef25e191d970374aca79af23fabed not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.517212 4822 scope.go:117] "RemoveContainer" containerID="cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.517635 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec\": container with ID starting with cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec not found: ID does not exist" containerID="cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.517712 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec"} err="failed to get container status \"cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec\": rpc error: code = NotFound desc = could not find container \"cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec\": container with ID starting with cdbc3f7a622ebf911059b3f226086eaae1ad2df6eb3855a7ec556211ada6f7ec not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.517761 4822 scope.go:117] "RemoveContainer" containerID="094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.518281 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864\": container with ID starting with 094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864 not found: ID does not exist" containerID="094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.518317 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864"} err="failed to get container status \"094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864\": rpc error: code = NotFound desc = could not find container \"094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864\": container with ID starting with 094ff267e76c6c6f0d02eee01fd56a723239ccd6ecdb1b741129fc1b52d74864 not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.518340 4822 scope.go:117] "RemoveContainer" containerID="c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.518712 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1\": container with ID starting with c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1 not found: ID does not exist" containerID="c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.518796 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1"} err="failed to get container status \"c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1\": rpc error: code = NotFound desc = could not find container \"c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1\": container with ID starting with c3702e870cf8a6b9f05b0d79441ee632883f9e3a9698aa025a07270f8856e2e1 not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.518823 4822 scope.go:117] "RemoveContainer" containerID="e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.519310 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad\": container with ID starting with e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad not found: ID does not exist" containerID="e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.519354 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad"} err="failed to get container status \"e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad\": rpc error: code = NotFound desc = could not find container \"e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad\": container with ID starting with e75293462759e7e85f7d7f79e336a1542aba2c22951ac6abcc7b237282f57dad not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.519381 4822 scope.go:117] "RemoveContainer" containerID="13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974" Dec 01 07:15:25 crc 
kubenswrapper[4822]: E1201 07:15:25.519914 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974\": container with ID starting with 13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974 not found: ID does not exist" containerID="13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.519951 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974"} err="failed to get container status \"13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974\": rpc error: code = NotFound desc = could not find container \"13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974\": container with ID starting with 13d34aa7a41bded5b4423f54f246dec188fecd4d8743de583cabe641812de974 not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.519974 4822 scope.go:117] "RemoveContainer" containerID="8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.520504 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7\": container with ID starting with 8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7 not found: ID does not exist" containerID="8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.520530 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7"} err="failed to get container status \"8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7\": rpc error: code = NotFound desc = could not find container \"8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7\": container with ID starting with 8f869fbabcf366be6042b703be4884d7980bbad433bcefe4e178cb7d170123a7 not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.520569 4822 scope.go:117] "RemoveContainer" containerID="39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.520897 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347\": container with ID starting with 39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347 not found: ID does not exist" containerID="39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.520922 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347"} err="failed to get container status \"39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347\": rpc error: code = NotFound desc = could not find container \"39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347\": container with ID starting with 39435bb2e34ed9c1b320a08f0ef40788d6a4e6ed5d0cc76da2df12cd333ad347 not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: 
I1201 07:15:25.520938 4822 scope.go:117] "RemoveContainer" containerID="c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.521209 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147\": container with ID starting with c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147 not found: ID does not exist" containerID="c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.521234 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147"} err="failed to get container status \"c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147\": rpc error: code = NotFound desc = could not find container \"c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147\": container with ID starting with c80567a75478bb3595b6efacc2618213d0ca28604b12588f4dff85b9f8147147 not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.521249 4822 scope.go:117] "RemoveContainer" containerID="0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.521504 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905\": container with ID starting with 0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905 not found: ID does not exist" containerID="0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.521577 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905"} err="failed to get container status \"0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905\": rpc error: code = NotFound desc = could not find container \"0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905\": container with ID starting with 0f698f0ca95eba525370099947f99cf3a4d149f457291baa9dc498f6db289905 not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.521607 4822 scope.go:117] "RemoveContainer" containerID="2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.521947 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151\": container with ID starting with 2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151 not found: ID does not exist" containerID="2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.521977 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151"} err="failed to get container status \"2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151\": rpc error: code = NotFound desc = could not find container \"2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151\": container 
with ID starting with 2632222b4d0832cb6b1bf28bab889556435ad406e71330dcbb1ce7269be96151 not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.521996 4822 scope.go:117] "RemoveContainer" containerID="cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.522378 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12\": container with ID starting with cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12 not found: ID does not exist" containerID="cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.522422 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12"} err="failed to get container status \"cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12\": rpc error: code = NotFound desc = could not find container \"cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12\": container with ID starting with cb303b9f7ece2ef6c7fb785d081e1b203d53ef6049bd3327cc87fcd417f01b12 not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.522452 4822 scope.go:117] "RemoveContainer" containerID="d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.522783 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374\": container with ID starting with d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374 not found: ID does not exist" containerID="d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.522810 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374"} err="failed to get container status \"d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374\": rpc error: code = NotFound desc = could not find container \"d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374\": container with ID starting with d8d81c0316835022d99b80c015b9f1ac27c6b3935a543267f17edd4e74729374 not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.522864 4822 scope.go:117] "RemoveContainer" containerID="cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.523176 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8\": container with ID starting with cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8 not found: ID does not exist" containerID="cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.523210 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8"} err="failed to get container status 
\"cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8\": rpc error: code = NotFound desc = could not find container \"cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8\": container with ID starting with cf16eff1ae13259903620a76296b8e7154eeb9d4756b6a6aa2582dc287c993c8 not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.523228 4822 scope.go:117] "RemoveContainer" containerID="4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3" Dec 01 07:15:25 crc kubenswrapper[4822]: E1201 07:15:25.523733 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3\": container with ID starting with 4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3 not found: ID does not exist" containerID="4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.523768 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3"} err="failed to get container status \"4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3\": rpc error: code = NotFound desc = could not find container \"4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3\": container with ID starting with 4495efc68907c9236a36546cd4e20eb9f2aaf5b240e423e8e3edd1b8119f2ef3 not found: ID does not exist" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.857130 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5prf9_27be7b1c-254b-4dd5-8889-1373d3281e64/ovs-vswitchd/0.log" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.858099 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.963359 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-run\") pod \"27be7b1c-254b-4dd5-8889-1373d3281e64\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.963410 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27be7b1c-254b-4dd5-8889-1373d3281e64-scripts\") pod \"27be7b1c-254b-4dd5-8889-1373d3281e64\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.963452 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-run" (OuterVolumeSpecName: "var-run") pod "27be7b1c-254b-4dd5-8889-1373d3281e64" (UID: "27be7b1c-254b-4dd5-8889-1373d3281e64"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.963533 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6wch\" (UniqueName: \"kubernetes.io/projected/27be7b1c-254b-4dd5-8889-1373d3281e64-kube-api-access-v6wch\") pod \"27be7b1c-254b-4dd5-8889-1373d3281e64\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.963601 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-etc-ovs\") pod \"27be7b1c-254b-4dd5-8889-1373d3281e64\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.963653 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-lib\") pod \"27be7b1c-254b-4dd5-8889-1373d3281e64\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.963713 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-log\") pod \"27be7b1c-254b-4dd5-8889-1373d3281e64\" (UID: \"27be7b1c-254b-4dd5-8889-1373d3281e64\") " Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.963791 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "27be7b1c-254b-4dd5-8889-1373d3281e64" (UID: "27be7b1c-254b-4dd5-8889-1373d3281e64"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.963823 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-lib" (OuterVolumeSpecName: "var-lib") pod "27be7b1c-254b-4dd5-8889-1373d3281e64" (UID: "27be7b1c-254b-4dd5-8889-1373d3281e64"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.963993 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-log" (OuterVolumeSpecName: "var-log") pod "27be7b1c-254b-4dd5-8889-1373d3281e64" (UID: "27be7b1c-254b-4dd5-8889-1373d3281e64"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.964207 4822 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-etc-ovs\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.964231 4822 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-lib\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.964251 4822 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-log\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.964269 4822 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27be7b1c-254b-4dd5-8889-1373d3281e64-var-run\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.965509 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27be7b1c-254b-4dd5-8889-1373d3281e64-scripts" (OuterVolumeSpecName: "scripts") pod "27be7b1c-254b-4dd5-8889-1373d3281e64" (UID: "27be7b1c-254b-4dd5-8889-1373d3281e64"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:25 crc kubenswrapper[4822]: I1201 07:15:25.967760 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27be7b1c-254b-4dd5-8889-1373d3281e64-kube-api-access-v6wch" (OuterVolumeSpecName: "kube-api-access-v6wch") pod "27be7b1c-254b-4dd5-8889-1373d3281e64" (UID: "27be7b1c-254b-4dd5-8889-1373d3281e64"). InnerVolumeSpecName "kube-api-access-v6wch". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.065936 4822 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27be7b1c-254b-4dd5-8889-1373d3281e64-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.065991 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6wch\" (UniqueName: \"kubernetes.io/projected/27be7b1c-254b-4dd5-8889-1373d3281e64-kube-api-access-v6wch\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.082362 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5prf9_27be7b1c-254b-4dd5-8889-1373d3281e64/ovs-vswitchd/0.log" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.084367 4822 generic.go:334] "Generic (PLEG): container finished" podID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" exitCode=137 Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.084469 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5prf9" event={"ID":"27be7b1c-254b-4dd5-8889-1373d3281e64","Type":"ContainerDied","Data":"c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0"} Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.084484 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-5prf9" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.084511 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5prf9" event={"ID":"27be7b1c-254b-4dd5-8889-1373d3281e64","Type":"ContainerDied","Data":"9702b1e37b18f11a39a73a0540181eade0bf1cb0369eec7100718fedeeac86d9"} Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.084597 4822 scope.go:117] "RemoveContainer" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.134598 4822 scope.go:117] "RemoveContainer" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.149507 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-5prf9"] Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.161129 4822 scope.go:117] "RemoveContainer" containerID="1affd40541c7e8157e218b9304310f8b68ceaf6708ad6cf840d000c28ae5a8b5" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.164709 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-5prf9"] Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.188525 4822 scope.go:117] "RemoveContainer" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" Dec 01 07:15:26 crc kubenswrapper[4822]: E1201 07:15:26.188955 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0\": container with ID starting with c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0 not found: ID does not exist" containerID="c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.189031 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0"} err="failed to get container status \"c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0\": rpc error: code = NotFound desc = could not find container \"c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0\": container with ID starting with c3a97f3d7ee5385f5191e5abc4f0d3c6b654551778e8cccba718fada7129d5b0 not found: ID does not exist" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.189063 4822 scope.go:117] "RemoveContainer" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" Dec 01 07:15:26 crc kubenswrapper[4822]: E1201 07:15:26.189512 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05\": container with ID starting with 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 not found: ID does not exist" containerID="2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.189531 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05"} err="failed to get container status \"2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05\": rpc error: code = NotFound desc = could not find container 
\"2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05\": container with ID starting with 2cdf644751fd1219b8a221d070931aef4a5403df125ae5017f410cc6702b0e05 not found: ID does not exist" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.189606 4822 scope.go:117] "RemoveContainer" containerID="1affd40541c7e8157e218b9304310f8b68ceaf6708ad6cf840d000c28ae5a8b5" Dec 01 07:15:26 crc kubenswrapper[4822]: E1201 07:15:26.189994 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1affd40541c7e8157e218b9304310f8b68ceaf6708ad6cf840d000c28ae5a8b5\": container with ID starting with 1affd40541c7e8157e218b9304310f8b68ceaf6708ad6cf840d000c28ae5a8b5 not found: ID does not exist" containerID="1affd40541c7e8157e218b9304310f8b68ceaf6708ad6cf840d000c28ae5a8b5" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.190022 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1affd40541c7e8157e218b9304310f8b68ceaf6708ad6cf840d000c28ae5a8b5"} err="failed to get container status \"1affd40541c7e8157e218b9304310f8b68ceaf6708ad6cf840d000c28ae5a8b5\": rpc error: code = NotFound desc = could not find container \"1affd40541c7e8157e218b9304310f8b68ceaf6708ad6cf840d000c28ae5a8b5\": container with ID starting with 1affd40541c7e8157e218b9304310f8b68ceaf6708ad6cf840d000c28ae5a8b5 not found: ID does not exist" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.970202 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" path="/var/lib/kubelet/pods/27be7b1c-254b-4dd5-8889-1373d3281e64/volumes" Dec 01 07:15:26 crc kubenswrapper[4822]: I1201 07:15:26.971604 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" path="/var/lib/kubelet/pods/b21643f1-db8c-4613-ac29-f1d4d0970b7b/volumes" Dec 01 07:15:29 crc kubenswrapper[4822]: I1201 07:15:29.532146 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell04972-account-delete-qmqf7" Dec 01 07:15:29 crc kubenswrapper[4822]: I1201 07:15:29.722431 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7p9f\" (UniqueName: \"kubernetes.io/projected/a3f05675-879d-4586-af90-8aa6b11ad8a3-kube-api-access-w7p9f\") pod \"a3f05675-879d-4586-af90-8aa6b11ad8a3\" (UID: \"a3f05675-879d-4586-af90-8aa6b11ad8a3\") " Dec 01 07:15:29 crc kubenswrapper[4822]: I1201 07:15:29.722661 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f05675-879d-4586-af90-8aa6b11ad8a3-operator-scripts\") pod \"a3f05675-879d-4586-af90-8aa6b11ad8a3\" (UID: \"a3f05675-879d-4586-af90-8aa6b11ad8a3\") " Dec 01 07:15:29 crc kubenswrapper[4822]: I1201 07:15:29.723813 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3f05675-879d-4586-af90-8aa6b11ad8a3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a3f05675-879d-4586-af90-8aa6b11ad8a3" (UID: "a3f05675-879d-4586-af90-8aa6b11ad8a3"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:29 crc kubenswrapper[4822]: I1201 07:15:29.734469 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3f05675-879d-4586-af90-8aa6b11ad8a3-kube-api-access-w7p9f" (OuterVolumeSpecName: "kube-api-access-w7p9f") pod "a3f05675-879d-4586-af90-8aa6b11ad8a3" (UID: "a3f05675-879d-4586-af90-8aa6b11ad8a3"). InnerVolumeSpecName "kube-api-access-w7p9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:29 crc kubenswrapper[4822]: I1201 07:15:29.825323 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f05675-879d-4586-af90-8aa6b11ad8a3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:29 crc kubenswrapper[4822]: I1201 07:15:29.825719 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7p9f\" (UniqueName: \"kubernetes.io/projected/a3f05675-879d-4586-af90-8aa6b11ad8a3-kube-api-access-w7p9f\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:29 crc kubenswrapper[4822]: E1201 07:15:29.928024 4822 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 01 07:15:29 crc kubenswrapper[4822]: E1201 07:15:29.928449 4822 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts podName:2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf nodeName:}" failed. No retries permitted until 2025-12-01 07:16:01.928382681 +0000 UTC m=+1517.249190407 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts") pod "barbican6e4d-account-delete-bz5tp" (UID: "2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf") : configmap "openstack-scripts" not found Dec 01 07:15:29 crc kubenswrapper[4822]: I1201 07:15:29.964953 4822 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","poda1df0a4e-4359-436c-9937-e4af9b500ae5"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort poda1df0a4e-4359-436c-9937-e4af9b500ae5] : Timed out while waiting for systemd to remove kubepods-besteffort-poda1df0a4e_4359_436c_9937_e4af9b500ae5.slice" Dec 01 07:15:29 crc kubenswrapper[4822]: E1201 07:15:29.965309 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort poda1df0a4e-4359-436c-9937-e4af9b500ae5] : unable to destroy cgroup paths for cgroup [kubepods besteffort poda1df0a4e-4359-436c-9937-e4af9b500ae5] : Timed out while waiting for systemd to remove kubepods-besteffort-poda1df0a4e_4359_436c_9937_e4af9b500ae5.slice" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" podUID="a1df0a4e-4359-436c-9937-e4af9b500ae5" Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.138001 4822 generic.go:334] "Generic (PLEG): container finished" podID="a3f05675-879d-4586-af90-8aa6b11ad8a3" containerID="ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2" exitCode=137 Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.138118 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6" Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.138117 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell04972-account-delete-qmqf7" event={"ID":"a3f05675-879d-4586-af90-8aa6b11ad8a3","Type":"ContainerDied","Data":"ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2"} Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.138266 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell04972-account-delete-qmqf7" event={"ID":"a3f05675-879d-4586-af90-8aa6b11ad8a3","Type":"ContainerDied","Data":"0b161634f276a0b4d892f2bb8e27c8e6f2449fab5e657e2c3ce5ddabe9fb59b3"} Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.138312 4822 scope.go:117] "RemoveContainer" containerID="ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2" Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.138822 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell04972-account-delete-qmqf7" Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.181813 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6"] Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.188581 4822 scope.go:117] "RemoveContainer" containerID="ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2" Dec 01 07:15:30 crc kubenswrapper[4822]: E1201 07:15:30.189224 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2\": container with ID starting with ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2 not found: ID does not exist" containerID="ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2" Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.189273 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2"} err="failed to get container status \"ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2\": rpc error: code = NotFound desc = could not find container \"ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2\": container with ID starting with ff703c96e34c2c8bf546d2ac9e5ff9717d4a7ff3ffbcc799b0e079d2f9703ee2 not found: ID does not exist" Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.196780 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-67cf4fc9cd-s6rh6"] Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.207873 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell04972-account-delete-qmqf7"] Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.221289 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell04972-account-delete-qmqf7"] Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.868051 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron40f1-account-delete-rb6h6" Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.935341 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement75d6-account-delete-46gbh" Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.948006 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi33d4-account-delete-5vtpn" Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.964501 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1df0a4e-4359-436c-9937-e4af9b500ae5" path="/var/lib/kubelet/pods/a1df0a4e-4359-436c-9937-e4af9b500ae5/volumes" Dec 01 07:15:30 crc kubenswrapper[4822]: I1201 07:15:30.965271 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3f05675-879d-4586-af90-8aa6b11ad8a3" path="/var/lib/kubelet/pods/a3f05675-879d-4586-af90-8aa6b11ad8a3/volumes" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.049995 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts\") pod \"a794a063-7f9d-4a0f-9cf7-ae70b70769eb\" (UID: \"a794a063-7f9d-4a0f-9cf7-ae70b70769eb\") " Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.050086 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7667\" (UniqueName: \"kubernetes.io/projected/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-kube-api-access-h7667\") pod \"a794a063-7f9d-4a0f-9cf7-ae70b70769eb\" (UID: \"a794a063-7f9d-4a0f-9cf7-ae70b70769eb\") " Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.050145 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cphjg\" (UniqueName: \"kubernetes.io/projected/b1312168-fba0-46d6-8ca3-346303262924-kube-api-access-cphjg\") pod \"b1312168-fba0-46d6-8ca3-346303262924\" (UID: \"b1312168-fba0-46d6-8ca3-346303262924\") " Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.050179 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8858\" (UniqueName: \"kubernetes.io/projected/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-kube-api-access-k8858\") pod \"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3\" (UID: \"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3\") " Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.050201 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts\") pod \"b1312168-fba0-46d6-8ca3-346303262924\" (UID: \"b1312168-fba0-46d6-8ca3-346303262924\") " Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.050272 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts\") pod \"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3\" (UID: \"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3\") " Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.051164 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3" (UID: "b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.051175 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a794a063-7f9d-4a0f-9cf7-ae70b70769eb" (UID: "a794a063-7f9d-4a0f-9cf7-ae70b70769eb"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.051257 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b1312168-fba0-46d6-8ca3-346303262924" (UID: "b1312168-fba0-46d6-8ca3-346303262924"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.056318 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-kube-api-access-k8858" (OuterVolumeSpecName: "kube-api-access-k8858") pod "b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3" (UID: "b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3"). InnerVolumeSpecName "kube-api-access-k8858". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.056487 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-kube-api-access-h7667" (OuterVolumeSpecName: "kube-api-access-h7667") pod "a794a063-7f9d-4a0f-9cf7-ae70b70769eb" (UID: "a794a063-7f9d-4a0f-9cf7-ae70b70769eb"). InnerVolumeSpecName "kube-api-access-h7667". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.056627 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1312168-fba0-46d6-8ca3-346303262924-kube-api-access-cphjg" (OuterVolumeSpecName: "kube-api-access-cphjg") pod "b1312168-fba0-46d6-8ca3-346303262924" (UID: "b1312168-fba0-46d6-8ca3-346303262924"). InnerVolumeSpecName "kube-api-access-cphjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.147188 4822 generic.go:334] "Generic (PLEG): container finished" podID="b1312168-fba0-46d6-8ca3-346303262924" containerID="a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a" exitCode=137 Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.147267 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi33d4-account-delete-5vtpn" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.147294 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi33d4-account-delete-5vtpn" event={"ID":"b1312168-fba0-46d6-8ca3-346303262924","Type":"ContainerDied","Data":"a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a"} Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.147327 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi33d4-account-delete-5vtpn" event={"ID":"b1312168-fba0-46d6-8ca3-346303262924","Type":"ContainerDied","Data":"e6c7110e8325f7235ac87d26637cb57cdead35db732b81cae4f2dc8de8fe8b19"} Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.147342 4822 scope.go:117] "RemoveContainer" containerID="a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.149592 4822 generic.go:334] "Generic (PLEG): container finished" podID="a794a063-7f9d-4a0f-9cf7-ae70b70769eb" containerID="2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20" exitCode=137 Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.149617 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement75d6-account-delete-46gbh" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.149686 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement75d6-account-delete-46gbh" event={"ID":"a794a063-7f9d-4a0f-9cf7-ae70b70769eb","Type":"ContainerDied","Data":"2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20"} Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.149751 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement75d6-account-delete-46gbh" event={"ID":"a794a063-7f9d-4a0f-9cf7-ae70b70769eb","Type":"ContainerDied","Data":"e8234b092a243bd2f096eec12f5321c7dc8225c8e2ac28de49c5bdb6228d1edb"} Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.151224 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.151248 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.151257 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7667\" (UniqueName: \"kubernetes.io/projected/a794a063-7f9d-4a0f-9cf7-ae70b70769eb-kube-api-access-h7667\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.151269 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cphjg\" (UniqueName: \"kubernetes.io/projected/b1312168-fba0-46d6-8ca3-346303262924-kube-api-access-cphjg\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.151278 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8858\" (UniqueName: \"kubernetes.io/projected/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3-kube-api-access-k8858\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.151287 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/b1312168-fba0-46d6-8ca3-346303262924-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.152865 4822 generic.go:334] "Generic (PLEG): container finished" podID="b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3" containerID="f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d" exitCode=137 Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.152926 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron40f1-account-delete-rb6h6" event={"ID":"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3","Type":"ContainerDied","Data":"f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d"} Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.152958 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron40f1-account-delete-rb6h6" event={"ID":"b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3","Type":"ContainerDied","Data":"c6814bb8297ec8d8dd45a1928c26602ef62d476aad12a763d0c989b1043fb60b"} Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.152972 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron40f1-account-delete-rb6h6" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.185766 4822 scope.go:117] "RemoveContainer" containerID="a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a" Dec 01 07:15:31 crc kubenswrapper[4822]: E1201 07:15:31.186446 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a\": container with ID starting with a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a not found: ID does not exist" containerID="a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.186507 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a"} err="failed to get container status \"a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a\": rpc error: code = NotFound desc = could not find container \"a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a\": container with ID starting with a1c19deb52d6600c61cae591fbcab2056dcabc1b7d9b442d02308f61bb1a3c0a not found: ID does not exist" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.186537 4822 scope.go:117] "RemoveContainer" containerID="2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.190833 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement75d6-account-delete-46gbh"] Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.198602 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement75d6-account-delete-46gbh"] Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.208025 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron40f1-account-delete-rb6h6"] Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.214356 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron40f1-account-delete-rb6h6"] Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.221750 4822 scope.go:117] "RemoveContainer" containerID="2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.221781 4822 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi33d4-account-delete-5vtpn"] Dec 01 07:15:31 crc kubenswrapper[4822]: E1201 07:15:31.222149 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20\": container with ID starting with 2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20 not found: ID does not exist" containerID="2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.222180 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20"} err="failed to get container status \"2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20\": rpc error: code = NotFound desc = could not find container \"2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20\": container with ID starting with 2bd7ca3a8ddc0269a6bfdca2dd010019ca740da470b6ea4795198603b3b50c20 not found: ID does not exist" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.222200 4822 scope.go:117] "RemoveContainer" containerID="f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.228321 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapi33d4-account-delete-5vtpn"] Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.240910 4822 scope.go:117] "RemoveContainer" containerID="f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d" Dec 01 07:15:31 crc kubenswrapper[4822]: E1201 07:15:31.241377 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d\": container with ID starting with f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d not found: ID does not exist" containerID="f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.241445 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d"} err="failed to get container status \"f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d\": rpc error: code = NotFound desc = could not find container \"f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d\": container with ID starting with f0301a107354fe6ed3aba44c81f5418b8f7d10a801749c0fe275a7167acb400d not found: ID does not exist" Dec 01 07:15:31 crc kubenswrapper[4822]: I1201 07:15:31.585849 4822 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod4508389c-d1b8-4646-902e-4fbb597de2b7"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod4508389c-d1b8-4646-902e-4fbb597de2b7] : Timed out while waiting for systemd to remove kubepods-besteffort-pod4508389c_d1b8_4646_902e_4fbb597de2b7.slice" Dec 01 07:15:32 crc kubenswrapper[4822]: I1201 07:15:32.969021 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a794a063-7f9d-4a0f-9cf7-ae70b70769eb" path="/var/lib/kubelet/pods/a794a063-7f9d-4a0f-9cf7-ae70b70769eb/volumes" Dec 01 07:15:32 crc kubenswrapper[4822]: I1201 07:15:32.971931 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="b1312168-fba0-46d6-8ca3-346303262924" path="/var/lib/kubelet/pods/b1312168-fba0-46d6-8ca3-346303262924/volumes" Dec 01 07:15:32 crc kubenswrapper[4822]: I1201 07:15:32.972935 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3" path="/var/lib/kubelet/pods/b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3/volumes" Dec 01 07:15:33 crc kubenswrapper[4822]: I1201 07:15:33.193761 4822 generic.go:334] "Generic (PLEG): container finished" podID="2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf" containerID="f05b963fa90e51f441cfec47cbaa1a3c1e77f0b689982aacfe5510b1a4ea7520" exitCode=137 Dec 01 07:15:33 crc kubenswrapper[4822]: I1201 07:15:33.193826 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6e4d-account-delete-bz5tp" event={"ID":"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf","Type":"ContainerDied","Data":"f05b963fa90e51f441cfec47cbaa1a3c1e77f0b689982aacfe5510b1a4ea7520"} Dec 01 07:15:33 crc kubenswrapper[4822]: I1201 07:15:33.528948 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican6e4d-account-delete-bz5tp" Dec 01 07:15:33 crc kubenswrapper[4822]: I1201 07:15:33.693487 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts\") pod \"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf\" (UID: \"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf\") " Dec 01 07:15:33 crc kubenswrapper[4822]: I1201 07:15:33.693676 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tb84b\" (UniqueName: \"kubernetes.io/projected/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-kube-api-access-tb84b\") pod \"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf\" (UID: \"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf\") " Dec 01 07:15:33 crc kubenswrapper[4822]: I1201 07:15:33.695538 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf" (UID: "2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:33 crc kubenswrapper[4822]: I1201 07:15:33.703062 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-kube-api-access-tb84b" (OuterVolumeSpecName: "kube-api-access-tb84b") pod "2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf" (UID: "2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf"). InnerVolumeSpecName "kube-api-access-tb84b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:33 crc kubenswrapper[4822]: I1201 07:15:33.795540 4822 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:33 crc kubenswrapper[4822]: I1201 07:15:33.795624 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tb84b\" (UniqueName: \"kubernetes.io/projected/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf-kube-api-access-tb84b\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:34 crc kubenswrapper[4822]: I1201 07:15:34.209588 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6e4d-account-delete-bz5tp" event={"ID":"2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf","Type":"ContainerDied","Data":"050253d8054570e847cc50cf7a63c84bd44c541a39888e0f822ac53cedbed3f6"} Dec 01 07:15:34 crc kubenswrapper[4822]: I1201 07:15:34.209693 4822 scope.go:117] "RemoveContainer" containerID="f05b963fa90e51f441cfec47cbaa1a3c1e77f0b689982aacfe5510b1a4ea7520" Dec 01 07:15:34 crc kubenswrapper[4822]: I1201 07:15:34.209684 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican6e4d-account-delete-bz5tp" Dec 01 07:15:34 crc kubenswrapper[4822]: I1201 07:15:34.265281 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican6e4d-account-delete-bz5tp"] Dec 01 07:15:34 crc kubenswrapper[4822]: I1201 07:15:34.270433 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican6e4d-account-delete-bz5tp"] Dec 01 07:15:34 crc kubenswrapper[4822]: I1201 07:15:34.964570 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf" path="/var/lib/kubelet/pods/2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf/volumes" Dec 01 07:15:42 crc kubenswrapper[4822]: I1201 07:15:42.543360 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:15:42 crc kubenswrapper[4822]: I1201 07:15:42.544001 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:15:46 crc kubenswrapper[4822]: I1201 07:15:46.765342 4822 scope.go:117] "RemoveContainer" containerID="d49bbf181bc9328d72a73c564789a29b58507dddaa3e5e79ce55c2b497a8f7ef" Dec 01 07:16:12 crc kubenswrapper[4822]: I1201 07:16:12.542368 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:16:12 crc kubenswrapper[4822]: I1201 07:16:12.544747 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection 
refused" Dec 01 07:16:12 crc kubenswrapper[4822]: I1201 07:16:12.544841 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 07:16:12 crc kubenswrapper[4822]: I1201 07:16:12.545880 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:16:12 crc kubenswrapper[4822]: I1201 07:16:12.546009 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" gracePeriod=600 Dec 01 07:16:12 crc kubenswrapper[4822]: E1201 07:16:12.676891 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:16:13 crc kubenswrapper[4822]: I1201 07:16:13.687223 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" exitCode=0 Dec 01 07:16:13 crc kubenswrapper[4822]: I1201 07:16:13.687287 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272"} Dec 01 07:16:13 crc kubenswrapper[4822]: I1201 07:16:13.687337 4822 scope.go:117] "RemoveContainer" containerID="cb68b029768127b77693597ceb7a762b8c61f40a1a25b9306ea20cdd9dcb63b5" Dec 01 07:16:13 crc kubenswrapper[4822]: I1201 07:16:13.688142 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:16:13 crc kubenswrapper[4822]: E1201 07:16:13.688811 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:16:24 crc kubenswrapper[4822]: I1201 07:16:24.955875 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:16:24 crc kubenswrapper[4822]: E1201 07:16:24.956738 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:16:36 crc kubenswrapper[4822]: I1201 07:16:36.951354 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:16:36 crc kubenswrapper[4822]: E1201 07:16:36.952421 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.511483 4822 scope.go:117] "RemoveContainer" containerID="bbb74df7e6125aca092a6af07124c2d09a96366bfe4d10cb7f87a04cced59681" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.559319 4822 scope.go:117] "RemoveContainer" containerID="67b4ee72481fb3afcfc3392e80b6461b38dd56f3a4807eaae7eb4e9cb55e7a0c" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.610647 4822 scope.go:117] "RemoveContainer" containerID="d36d158bda666c23ed6022000dfa5fbf2867d83d065c03a9293477def80aa751" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.647158 4822 scope.go:117] "RemoveContainer" containerID="b8a08041b8cc69c12a5f58bc931a20fd82d2fe38b1578f6ab130694d4022707d" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.698794 4822 scope.go:117] "RemoveContainer" containerID="f5ca2f96ae7a12beec4679c70a0fc08d1c3054a30bfc4e4071fda687d5f7b971" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.722577 4822 scope.go:117] "RemoveContainer" containerID="1a334e476417c310fa29ea5067dd72eaf0a9ff18b956d537dea22d61fde5ec46" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.781720 4822 scope.go:117] "RemoveContainer" containerID="fe6640858936a836864b4bbca0ccc0aebc36c4ab31fea176ffa9a0222c6ad393" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.810827 4822 scope.go:117] "RemoveContainer" containerID="6b87f7e8236030d3084911733ac4af53b4d1e9f9c4db1326fa5769ed7e4e91e1" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.834252 4822 scope.go:117] "RemoveContainer" containerID="75e7995266bdbf6492daa619b0f5223c1c2481c8944e162cc407d65a37b501f6" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.878319 4822 scope.go:117] "RemoveContainer" containerID="0fb01169b6f760501811400952165121826877ab0f41bcfb6de0a4bad911be52" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.899913 4822 scope.go:117] "RemoveContainer" containerID="52174cf0388de8aa36102ca3e1e1ec5cee4d830be4c181aed635624923e8d0e1" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.945871 4822 scope.go:117] "RemoveContainer" containerID="db32a07804c815b922bb3650cdaa9cf76de9ef8ee30f368859af9cc6273db1f1" Dec 01 07:16:47 crc kubenswrapper[4822]: I1201 07:16:47.989945 4822 scope.go:117] "RemoveContainer" containerID="0f19d42ba5cdb615196de8f1acc4e7feb55cd199f580a1514463d5627d504563" Dec 01 07:16:48 crc kubenswrapper[4822]: I1201 07:16:48.026367 4822 scope.go:117] "RemoveContainer" containerID="daeb2606271c3bc7ac98c2ea3f6c896b418e1dc4f9f359b298cabd839d6d1a12" Dec 01 07:16:48 crc kubenswrapper[4822]: I1201 07:16:48.065145 4822 scope.go:117] "RemoveContainer" containerID="c4e609b36f87f6435791314d2a1c34cae79c7cc179b73cee36977247f8022d6b" Dec 01 07:16:48 crc kubenswrapper[4822]: I1201 07:16:48.096642 4822 scope.go:117] 
"RemoveContainer" containerID="977351d0eec5eede4e16b4c71c3f7da7b277de2618a05bde7b6e5dd4825cbde2" Dec 01 07:16:48 crc kubenswrapper[4822]: I1201 07:16:48.170088 4822 scope.go:117] "RemoveContainer" containerID="449bd9a27b6680631de93239f213a795f8827ad6d0b52167e266af16c633201e" Dec 01 07:16:48 crc kubenswrapper[4822]: I1201 07:16:48.192519 4822 scope.go:117] "RemoveContainer" containerID="4897f922706c50863ab31b9b5bb98546c2f8892b8567054eb68d14885496f350" Dec 01 07:16:48 crc kubenswrapper[4822]: I1201 07:16:48.212966 4822 scope.go:117] "RemoveContainer" containerID="deae1406f33a30faeb722a6c9ba9c11668ee2406828f189e39fe3ff530735d90" Dec 01 07:16:50 crc kubenswrapper[4822]: I1201 07:16:50.951064 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:16:50 crc kubenswrapper[4822]: E1201 07:16:50.951722 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:17:04 crc kubenswrapper[4822]: I1201 07:17:04.954445 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:17:04 crc kubenswrapper[4822]: E1201 07:17:04.955112 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:17:19 crc kubenswrapper[4822]: I1201 07:17:19.950944 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:17:19 crc kubenswrapper[4822]: E1201 07:17:19.951999 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:17:30 crc kubenswrapper[4822]: I1201 07:17:30.951821 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:17:30 crc kubenswrapper[4822]: E1201 07:17:30.952715 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.347001 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hc6pv"] Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348093 
4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7558a6-6804-48af-b74d-394b7c5dd57e" containerName="neutron-api" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348116 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7558a6-6804-48af-b74d-394b7c5dd57e" containerName="neutron-api" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348141 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server-init" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348153 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server-init" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348173 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b422e423-4e50-4e96-a341-d7bb5188c4af" containerName="keystone-api" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348184 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b422e423-4e50-4e96-a341-d7bb5188c4af" containerName="keystone-api" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348206 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-replicator" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348216 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-replicator" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348227 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a206e77a-0c4d-49bb-b6d9-c0d18990bd54" containerName="nova-cell0-conductor-conductor" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348238 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a206e77a-0c4d-49bb-b6d9-c0d18990bd54" containerName="nova-cell0-conductor-conductor" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348252 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd2326e5-f8a2-47ca-8519-576caa1825c5" containerName="openstack-network-exporter" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348265 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd2326e5-f8a2-47ca-8519-576caa1825c5" containerName="openstack-network-exporter" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348284 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="sg-core" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348293 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="sg-core" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348305 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da2985c5-716e-43ad-b892-ea29d88fa639" containerName="rabbitmq" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348316 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="da2985c5-716e-43ad-b892-ea29d88fa639" containerName="rabbitmq" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348330 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="rsync" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348339 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="rsync" Dec 01 07:17:37 crc kubenswrapper[4822]: 
E1201 07:17:37.348351 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33a2ee0e-4d60-46f9-9f2a-a094af634a64" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348360 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="33a2ee0e-4d60-46f9-9f2a-a094af634a64" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348376 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad18272e-45a9-40cd-8b46-2de8cb3a31be" containerName="galera" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348386 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad18272e-45a9-40cd-8b46-2de8cb3a31be" containerName="galera" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348400 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46169b3f-3e1f-4601-a82e-f3ea1bdde003" containerName="nova-cell1-conductor-conductor" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348410 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="46169b3f-3e1f-4601-a82e-f3ea1bdde003" containerName="nova-cell1-conductor-conductor" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348422 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="swift-recon-cron" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348432 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="swift-recon-cron" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348446 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-server" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348456 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-server" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348475 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1229c08-35a5-4f16-8334-f32bb9b852b6" containerName="rabbitmq" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348487 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1229c08-35a5-4f16-8334-f32bb9b852b6" containerName="rabbitmq" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348509 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-updater" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348519 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-updater" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348539 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74508f35-c5cd-4e07-8883-831d2de65f35" containerName="glance-log" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348576 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="74508f35-c5cd-4e07-8883-831d2de65f35" containerName="glance-log" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348602 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="ceilometer-central-agent" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348613 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" 
containerName="ceilometer-central-agent" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348630 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-auditor" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348641 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-auditor" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348652 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-reaper" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348662 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-reaper" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348677 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3f05675-879d-4586-af90-8aa6b11ad8a3" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348687 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3f05675-879d-4586-af90-8aa6b11ad8a3" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348705 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7558a6-6804-48af-b74d-394b7c5dd57e" containerName="neutron-httpd" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348715 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7558a6-6804-48af-b74d-394b7c5dd57e" containerName="neutron-httpd" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348736 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="proxy-httpd" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348748 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="proxy-httpd" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348767 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovs-vswitchd" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348777 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovs-vswitchd" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348794 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd2326e5-f8a2-47ca-8519-576caa1825c5" containerName="ovn-northd" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348805 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd2326e5-f8a2-47ca-8519-576caa1825c5" containerName="ovn-northd" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348818 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348829 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348849 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad18272e-45a9-40cd-8b46-2de8cb3a31be" containerName="mysql-bootstrap" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348860 4822 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="ad18272e-45a9-40cd-8b46-2de8cb3a31be" containerName="mysql-bootstrap" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348881 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-server" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348893 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-server" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348910 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da2985c5-716e-43ad-b892-ea29d88fa639" containerName="setup-container" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348920 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="da2985c5-716e-43ad-b892-ea29d88fa639" containerName="setup-container" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348931 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-updater" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348940 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-updater" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348956 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="ceilometer-notification-agent" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348966 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="ceilometer-notification-agent" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.348987 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1033bda-45cf-46f7-b21d-1d12a4a4a33a" containerName="collect-profiles" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.348998 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1033bda-45cf-46f7-b21d-1d12a4a4a33a" containerName="collect-profiles" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349023 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349033 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349047 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ea36c73-7cc2-4da4-a6f9-14d0af7c7210" containerName="nova-scheduler-scheduler" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349058 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea36c73-7cc2-4da4-a6f9-14d0af7c7210" containerName="nova-scheduler-scheduler" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349076 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1229c08-35a5-4f16-8334-f32bb9b852b6" containerName="setup-container" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349086 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1229c08-35a5-4f16-8334-f32bb9b852b6" containerName="setup-container" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349102 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11e4a2b6-5901-43b2-ab37-ab16b0ac03b0" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 
07:17:37.349113 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="11e4a2b6-5901-43b2-ab37-ab16b0ac03b0" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349125 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a794a063-7f9d-4a0f-9cf7-ae70b70769eb" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349135 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a794a063-7f9d-4a0f-9cf7-ae70b70769eb" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349154 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349165 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349179 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74508f35-c5cd-4e07-8883-831d2de65f35" containerName="glance-httpd" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349189 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="74508f35-c5cd-4e07-8883-831d2de65f35" containerName="glance-httpd" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349204 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baab33a8-22b8-4097-8c91-73d5f005fdf7" containerName="memcached" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349216 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="baab33a8-22b8-4097-8c91-73d5f005fdf7" containerName="memcached" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349233 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-auditor" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349246 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-auditor" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349261 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" containerName="nova-metadata-metadata" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349272 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" containerName="nova-metadata-metadata" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349288 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-replicator" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349298 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-replicator" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349318 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1312168-fba0-46d6-8ca3-346303262924" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349329 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1312168-fba0-46d6-8ca3-346303262924" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349347 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" 
containerName="nova-metadata-log" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349357 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" containerName="nova-metadata-log" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349376 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="add830fb-5a2f-4cc2-8998-32ca893263db" containerName="ovn-controller" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349387 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="add830fb-5a2f-4cc2-8998-32ca893263db" containerName="ovn-controller" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349400 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-server" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349412 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-server" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349430 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-auditor" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349441 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-auditor" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349455 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-replicator" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349465 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-replicator" Dec 01 07:17:37 crc kubenswrapper[4822]: E1201 07:17:37.349480 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-expirer" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.349490 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-expirer" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350380 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-expirer" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350405 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="33a2ee0e-4d60-46f9-9f2a-a094af634a64" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350486 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1312168-fba0-46d6-8ca3-346303262924" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350508 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-auditor" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350520 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a206e77a-0c4d-49bb-b6d9-c0d18990bd54" containerName="nova-cell0-conductor-conductor" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350538 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" containerName="nova-metadata-log" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350580 4822 
memory_manager.go:354] "RemoveStaleState removing state" podUID="74508f35-c5cd-4e07-8883-831d2de65f35" containerName="glance-httpd" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350595 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-server" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350610 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3f05675-879d-4586-af90-8aa6b11ad8a3" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350626 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-replicator" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350640 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-server" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350652 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="ceilometer-central-agent" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350671 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-reaper" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350686 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1229c08-35a5-4f16-8334-f32bb9b852b6" containerName="rabbitmq" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350701 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-replicator" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350719 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="da2985c5-716e-43ad-b892-ea29d88fa639" containerName="rabbitmq" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350731 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e7558a6-6804-48af-b74d-394b7c5dd57e" containerName="neutron-httpd" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350746 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ea36c73-7cc2-4da4-a6f9-14d0af7c7210" containerName="nova-scheduler-scheduler" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350765 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-server" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350783 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="rsync" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350801 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1033bda-45cf-46f7-b21d-1d12a4a4a33a" containerName="collect-profiles" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350849 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a794a063-7f9d-4a0f-9cf7-ae70b70769eb" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350863 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b725dbb9-8785-43a1-9f35-215938938f6e" containerName="nova-metadata-metadata" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350877 4822 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="fd2326e5-f8a2-47ca-8519-576caa1825c5" containerName="ovn-northd" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350893 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="swift-recon-cron" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350906 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="ceilometer-notification-agent" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350919 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="sg-core" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350929 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-updater" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350946 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e7558a6-6804-48af-b74d-394b7c5dd57e" containerName="neutron-api" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350956 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovsdb-server" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350974 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b422e423-4e50-4e96-a341-d7bb5188c4af" containerName="keystone-api" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.350987 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="baab33a8-22b8-4097-8c91-73d5f005fdf7" containerName="memcached" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351000 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="46169b3f-3e1f-4601-a82e-f3ea1bdde003" containerName="nova-cell1-conductor-conductor" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351018 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad18272e-45a9-40cd-8b46-2de8cb3a31be" containerName="galera" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351028 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="27be7b1c-254b-4dd5-8889-1373d3281e64" containerName="ovs-vswitchd" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351045 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="f82bf765-7694-4ad6-8680-258c9e96cde0" containerName="proxy-httpd" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351056 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="74508f35-c5cd-4e07-8883-831d2de65f35" containerName="glance-log" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351074 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="account-auditor" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351093 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd2326e5-f8a2-47ca-8519-576caa1825c5" containerName="openstack-network-exporter" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351106 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="11e4a2b6-5901-43b2-ab37-ab16b0ac03b0" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351122 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="add830fb-5a2f-4cc2-8998-32ca893263db" containerName="ovn-controller" Dec 01 07:17:37 crc 
kubenswrapper[4822]: I1201 07:17:37.351137 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-updater" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351149 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="object-auditor" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351160 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21643f1-db8c-4613-ac29-f1d4d0970b7b" containerName="container-replicator" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351177 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f6d1187-bdc8-4d5e-bab9-c71cf9d5f7bf" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.351189 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6ef3a79-e5a7-4045-bb8e-3fa935cfb7d3" containerName="mariadb-account-delete" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.352874 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.374243 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hc6pv"] Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.482394 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkqxs\" (UniqueName: \"kubernetes.io/projected/8c1a35cc-ec69-4551-9158-f10daad5905f-kube-api-access-gkqxs\") pod \"certified-operators-hc6pv\" (UID: \"8c1a35cc-ec69-4551-9158-f10daad5905f\") " pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.482631 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c1a35cc-ec69-4551-9158-f10daad5905f-catalog-content\") pod \"certified-operators-hc6pv\" (UID: \"8c1a35cc-ec69-4551-9158-f10daad5905f\") " pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.482689 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c1a35cc-ec69-4551-9158-f10daad5905f-utilities\") pod \"certified-operators-hc6pv\" (UID: \"8c1a35cc-ec69-4551-9158-f10daad5905f\") " pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.583643 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c1a35cc-ec69-4551-9158-f10daad5905f-catalog-content\") pod \"certified-operators-hc6pv\" (UID: \"8c1a35cc-ec69-4551-9158-f10daad5905f\") " pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.583914 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c1a35cc-ec69-4551-9158-f10daad5905f-utilities\") pod \"certified-operators-hc6pv\" (UID: \"8c1a35cc-ec69-4551-9158-f10daad5905f\") " pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.584000 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-gkqxs\" (UniqueName: \"kubernetes.io/projected/8c1a35cc-ec69-4551-9158-f10daad5905f-kube-api-access-gkqxs\") pod \"certified-operators-hc6pv\" (UID: \"8c1a35cc-ec69-4551-9158-f10daad5905f\") " pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.584709 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c1a35cc-ec69-4551-9158-f10daad5905f-catalog-content\") pod \"certified-operators-hc6pv\" (UID: \"8c1a35cc-ec69-4551-9158-f10daad5905f\") " pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.584847 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c1a35cc-ec69-4551-9158-f10daad5905f-utilities\") pod \"certified-operators-hc6pv\" (UID: \"8c1a35cc-ec69-4551-9158-f10daad5905f\") " pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.618450 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkqxs\" (UniqueName: \"kubernetes.io/projected/8c1a35cc-ec69-4551-9158-f10daad5905f-kube-api-access-gkqxs\") pod \"certified-operators-hc6pv\" (UID: \"8c1a35cc-ec69-4551-9158-f10daad5905f\") " pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:37 crc kubenswrapper[4822]: I1201 07:17:37.679150 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:38 crc kubenswrapper[4822]: I1201 07:17:38.302632 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hc6pv"] Dec 01 07:17:38 crc kubenswrapper[4822]: I1201 07:17:38.666811 4822 generic.go:334] "Generic (PLEG): container finished" podID="8c1a35cc-ec69-4551-9158-f10daad5905f" containerID="d9d03da8d617df9a77068400d59ab612033f786fd514079a8134bb241c3ae61f" exitCode=0 Dec 01 07:17:38 crc kubenswrapper[4822]: I1201 07:17:38.666910 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc6pv" event={"ID":"8c1a35cc-ec69-4551-9158-f10daad5905f","Type":"ContainerDied","Data":"d9d03da8d617df9a77068400d59ab612033f786fd514079a8134bb241c3ae61f"} Dec 01 07:17:38 crc kubenswrapper[4822]: I1201 07:17:38.667083 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc6pv" event={"ID":"8c1a35cc-ec69-4551-9158-f10daad5905f","Type":"ContainerStarted","Data":"d71322432731e5db35a8446451a7a9f94782dcd11e26b33e7b1f3b84b99f2e69"} Dec 01 07:17:39 crc kubenswrapper[4822]: I1201 07:17:39.679072 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc6pv" event={"ID":"8c1a35cc-ec69-4551-9158-f10daad5905f","Type":"ContainerStarted","Data":"de41cc69a1cdba510a655274d67ce5cb49d6450ebb763598696e39a21871ed26"} Dec 01 07:17:40 crc kubenswrapper[4822]: I1201 07:17:40.690651 4822 generic.go:334] "Generic (PLEG): container finished" podID="8c1a35cc-ec69-4551-9158-f10daad5905f" containerID="de41cc69a1cdba510a655274d67ce5cb49d6450ebb763598696e39a21871ed26" exitCode=0 Dec 01 07:17:40 crc kubenswrapper[4822]: I1201 07:17:40.690856 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc6pv" 
event={"ID":"8c1a35cc-ec69-4551-9158-f10daad5905f","Type":"ContainerDied","Data":"de41cc69a1cdba510a655274d67ce5cb49d6450ebb763598696e39a21871ed26"} Dec 01 07:17:42 crc kubenswrapper[4822]: I1201 07:17:42.711638 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc6pv" event={"ID":"8c1a35cc-ec69-4551-9158-f10daad5905f","Type":"ContainerStarted","Data":"32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac"} Dec 01 07:17:42 crc kubenswrapper[4822]: I1201 07:17:42.737087 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hc6pv" podStartSLOduration=2.871025389 podStartE2EDuration="5.737057068s" podCreationTimestamp="2025-12-01 07:17:37 +0000 UTC" firstStartedPulling="2025-12-01 07:17:38.668289252 +0000 UTC m=+1613.989096948" lastFinishedPulling="2025-12-01 07:17:41.534320901 +0000 UTC m=+1616.855128627" observedRunningTime="2025-12-01 07:17:42.730965459 +0000 UTC m=+1618.051773155" watchObservedRunningTime="2025-12-01 07:17:42.737057068 +0000 UTC m=+1618.057864794" Dec 01 07:17:42 crc kubenswrapper[4822]: I1201 07:17:42.951956 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:17:42 crc kubenswrapper[4822]: E1201 07:17:42.952518 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:17:47 crc kubenswrapper[4822]: I1201 07:17:47.681031 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:47 crc kubenswrapper[4822]: I1201 07:17:47.681448 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:47 crc kubenswrapper[4822]: I1201 07:17:47.746741 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:47 crc kubenswrapper[4822]: I1201 07:17:47.829959 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:47 crc kubenswrapper[4822]: I1201 07:17:47.996273 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hc6pv"] Dec 01 07:17:48 crc kubenswrapper[4822]: I1201 07:17:48.494107 4822 scope.go:117] "RemoveContainer" containerID="995ca5fdebaa312592d184b7f0d85a781106462fc236fe76b19b5c65128c9922" Dec 01 07:17:48 crc kubenswrapper[4822]: I1201 07:17:48.520112 4822 scope.go:117] "RemoveContainer" containerID="c74bb3693663962c268382b1974734e4c228ed0e61b1129daf7f28351b85cae6" Dec 01 07:17:48 crc kubenswrapper[4822]: I1201 07:17:48.540029 4822 scope.go:117] "RemoveContainer" containerID="819d59f612f038c2b40fbd5bd6e0ae52aec5cfa454b47bb1de884f3a09c04531" Dec 01 07:17:48 crc kubenswrapper[4822]: I1201 07:17:48.564772 4822 scope.go:117] "RemoveContainer" containerID="a96f54c06ad4ed4cadbe27b287eb8cc8b5491ac4590fd3bfa726e20944e4f2fd" Dec 01 07:17:48 crc kubenswrapper[4822]: I1201 07:17:48.622462 4822 scope.go:117] "RemoveContainer" 
containerID="42a4de3b243b1977444983268e3455770ea130dc481d53f206e1f70bf6eac99c" Dec 01 07:17:48 crc kubenswrapper[4822]: I1201 07:17:48.643318 4822 scope.go:117] "RemoveContainer" containerID="7b63b2bf9e63e64f024e0d88a86fea704c8fb9efcca3eec122913d83b9c7b804" Dec 01 07:17:48 crc kubenswrapper[4822]: I1201 07:17:48.664413 4822 scope.go:117] "RemoveContainer" containerID="70cf9d294666fb1c698936022b2f9f0fdb8b835395a4a34acdd4f7d54a3e99b9" Dec 01 07:17:48 crc kubenswrapper[4822]: I1201 07:17:48.702392 4822 scope.go:117] "RemoveContainer" containerID="d8ae9f3f72c10090b3836cca4c19f9fbb08bae4e11edeb4b6da22d259ef203dc" Dec 01 07:17:48 crc kubenswrapper[4822]: I1201 07:17:48.738017 4822 scope.go:117] "RemoveContainer" containerID="ffa9069e31e7e4f177836e190599b849cde3830e468dafa8ed7d62114db3aae7" Dec 01 07:17:48 crc kubenswrapper[4822]: I1201 07:17:48.757751 4822 scope.go:117] "RemoveContainer" containerID="da2877c86b5adb431c13ff28bb62c50e650b56f0e6e01accc5c938c8b253a36c" Dec 01 07:17:48 crc kubenswrapper[4822]: I1201 07:17:48.785447 4822 scope.go:117] "RemoveContainer" containerID="cddecd865f20481a440a7f35a1049390c9555274514b9c1774c27cfcf78dfe1b" Dec 01 07:17:48 crc kubenswrapper[4822]: I1201 07:17:48.824804 4822 scope.go:117] "RemoveContainer" containerID="5e6a9acca1e0a2760560eb2794c3fdbee1c7e308ba21194c4a721e5e3aa20688" Dec 01 07:17:49 crc kubenswrapper[4822]: I1201 07:17:49.789040 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hc6pv" podUID="8c1a35cc-ec69-4551-9158-f10daad5905f" containerName="registry-server" containerID="cri-o://32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac" gracePeriod=2 Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.218714 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.342260 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkqxs\" (UniqueName: \"kubernetes.io/projected/8c1a35cc-ec69-4551-9158-f10daad5905f-kube-api-access-gkqxs\") pod \"8c1a35cc-ec69-4551-9158-f10daad5905f\" (UID: \"8c1a35cc-ec69-4551-9158-f10daad5905f\") " Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.342380 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c1a35cc-ec69-4551-9158-f10daad5905f-catalog-content\") pod \"8c1a35cc-ec69-4551-9158-f10daad5905f\" (UID: \"8c1a35cc-ec69-4551-9158-f10daad5905f\") " Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.342433 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c1a35cc-ec69-4551-9158-f10daad5905f-utilities\") pod \"8c1a35cc-ec69-4551-9158-f10daad5905f\" (UID: \"8c1a35cc-ec69-4551-9158-f10daad5905f\") " Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.343739 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c1a35cc-ec69-4551-9158-f10daad5905f-utilities" (OuterVolumeSpecName: "utilities") pod "8c1a35cc-ec69-4551-9158-f10daad5905f" (UID: "8c1a35cc-ec69-4551-9158-f10daad5905f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.349367 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c1a35cc-ec69-4551-9158-f10daad5905f-kube-api-access-gkqxs" (OuterVolumeSpecName: "kube-api-access-gkqxs") pod "8c1a35cc-ec69-4551-9158-f10daad5905f" (UID: "8c1a35cc-ec69-4551-9158-f10daad5905f"). InnerVolumeSpecName "kube-api-access-gkqxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.397229 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c1a35cc-ec69-4551-9158-f10daad5905f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c1a35cc-ec69-4551-9158-f10daad5905f" (UID: "8c1a35cc-ec69-4551-9158-f10daad5905f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.444746 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c1a35cc-ec69-4551-9158-f10daad5905f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.445029 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c1a35cc-ec69-4551-9158-f10daad5905f-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.445255 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkqxs\" (UniqueName: \"kubernetes.io/projected/8c1a35cc-ec69-4551-9158-f10daad5905f-kube-api-access-gkqxs\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.834121 4822 generic.go:334] "Generic (PLEG): container finished" podID="8c1a35cc-ec69-4551-9158-f10daad5905f" containerID="32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac" exitCode=0 Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.834200 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc6pv" event={"ID":"8c1a35cc-ec69-4551-9158-f10daad5905f","Type":"ContainerDied","Data":"32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac"} Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.834241 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hc6pv" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.834517 4822 scope.go:117] "RemoveContainer" containerID="32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.834498 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hc6pv" event={"ID":"8c1a35cc-ec69-4551-9158-f10daad5905f","Type":"ContainerDied","Data":"d71322432731e5db35a8446451a7a9f94782dcd11e26b33e7b1f3b84b99f2e69"} Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.881533 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hc6pv"] Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.883720 4822 scope.go:117] "RemoveContainer" containerID="de41cc69a1cdba510a655274d67ce5cb49d6450ebb763598696e39a21871ed26" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.892351 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hc6pv"] Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.915879 4822 scope.go:117] "RemoveContainer" containerID="d9d03da8d617df9a77068400d59ab612033f786fd514079a8134bb241c3ae61f" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.941545 4822 scope.go:117] "RemoveContainer" containerID="32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac" Dec 01 07:17:50 crc kubenswrapper[4822]: E1201 07:17:50.942694 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac\": container with ID starting with 32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac not found: ID does not exist" containerID="32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.942732 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac"} err="failed to get container status \"32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac\": rpc error: code = NotFound desc = could not find container \"32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac\": container with ID starting with 32e6d89bd04a916ed67a58c7ce5239b935ff1b3db84632f789094b7ca1a433ac not found: ID does not exist" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.942752 4822 scope.go:117] "RemoveContainer" containerID="de41cc69a1cdba510a655274d67ce5cb49d6450ebb763598696e39a21871ed26" Dec 01 07:17:50 crc kubenswrapper[4822]: E1201 07:17:50.943634 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de41cc69a1cdba510a655274d67ce5cb49d6450ebb763598696e39a21871ed26\": container with ID starting with de41cc69a1cdba510a655274d67ce5cb49d6450ebb763598696e39a21871ed26 not found: ID does not exist" containerID="de41cc69a1cdba510a655274d67ce5cb49d6450ebb763598696e39a21871ed26" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.943699 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de41cc69a1cdba510a655274d67ce5cb49d6450ebb763598696e39a21871ed26"} err="failed to get container status \"de41cc69a1cdba510a655274d67ce5cb49d6450ebb763598696e39a21871ed26\": rpc error: code = NotFound desc = could not find 
container \"de41cc69a1cdba510a655274d67ce5cb49d6450ebb763598696e39a21871ed26\": container with ID starting with de41cc69a1cdba510a655274d67ce5cb49d6450ebb763598696e39a21871ed26 not found: ID does not exist" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.943750 4822 scope.go:117] "RemoveContainer" containerID="d9d03da8d617df9a77068400d59ab612033f786fd514079a8134bb241c3ae61f" Dec 01 07:17:50 crc kubenswrapper[4822]: E1201 07:17:50.944159 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9d03da8d617df9a77068400d59ab612033f786fd514079a8134bb241c3ae61f\": container with ID starting with d9d03da8d617df9a77068400d59ab612033f786fd514079a8134bb241c3ae61f not found: ID does not exist" containerID="d9d03da8d617df9a77068400d59ab612033f786fd514079a8134bb241c3ae61f" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.944203 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9d03da8d617df9a77068400d59ab612033f786fd514079a8134bb241c3ae61f"} err="failed to get container status \"d9d03da8d617df9a77068400d59ab612033f786fd514079a8134bb241c3ae61f\": rpc error: code = NotFound desc = could not find container \"d9d03da8d617df9a77068400d59ab612033f786fd514079a8134bb241c3ae61f\": container with ID starting with d9d03da8d617df9a77068400d59ab612033f786fd514079a8134bb241c3ae61f not found: ID does not exist" Dec 01 07:17:50 crc kubenswrapper[4822]: I1201 07:17:50.964693 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c1a35cc-ec69-4551-9158-f10daad5905f" path="/var/lib/kubelet/pods/8c1a35cc-ec69-4551-9158-f10daad5905f/volumes" Dec 01 07:17:55 crc kubenswrapper[4822]: I1201 07:17:55.950497 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:17:55 crc kubenswrapper[4822]: E1201 07:17:55.952866 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:18:09 crc kubenswrapper[4822]: I1201 07:18:09.951202 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:18:09 crc kubenswrapper[4822]: E1201 07:18:09.952142 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:18:20 crc kubenswrapper[4822]: I1201 07:18:20.951267 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:18:20 crc kubenswrapper[4822]: E1201 07:18:20.952492 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:18:32 crc kubenswrapper[4822]: I1201 07:18:32.951460 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:18:32 crc kubenswrapper[4822]: E1201 07:18:32.952647 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:18:46 crc kubenswrapper[4822]: I1201 07:18:46.951474 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:18:46 crc kubenswrapper[4822]: E1201 07:18:46.952642 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:18:49 crc kubenswrapper[4822]: I1201 07:18:49.033715 4822 scope.go:117] "RemoveContainer" containerID="2db919405f6a2933e172ecad1f1d73d7ffb6d65d29819ebf4d8122913569142f" Dec 01 07:18:49 crc kubenswrapper[4822]: I1201 07:18:49.061911 4822 scope.go:117] "RemoveContainer" containerID="628d09c60f5340984c3a0cf55815e7eba46360cfa8b9ded6abf84c8536c847d3" Dec 01 07:18:49 crc kubenswrapper[4822]: I1201 07:18:49.126118 4822 scope.go:117] "RemoveContainer" containerID="6dbc8d38bd7d89b31489d0532e8c45abe57eb433a927646e26ebb4f7bc31c405" Dec 01 07:18:49 crc kubenswrapper[4822]: I1201 07:18:49.191241 4822 scope.go:117] "RemoveContainer" containerID="7b3a6e868b5472233f1f6a54a7afaebe77ab0191dae41c207b1298b4a0e54500" Dec 01 07:18:49 crc kubenswrapper[4822]: I1201 07:18:49.212140 4822 scope.go:117] "RemoveContainer" containerID="4ab0177f95b86e1dd9ace38b493c47efe247336706a36e6bd5c5cfd1864e64b5" Dec 01 07:18:49 crc kubenswrapper[4822]: I1201 07:18:49.251817 4822 scope.go:117] "RemoveContainer" containerID="2ed851f5731fcc9e9358b19e778659d1ad17875e1364d03170e682f75a7df687" Dec 01 07:18:49 crc kubenswrapper[4822]: I1201 07:18:49.294142 4822 scope.go:117] "RemoveContainer" containerID="9f565c712bfac69eb668bb277f3e84214426f99043d5e8407c46ddd71a4c7191" Dec 01 07:18:49 crc kubenswrapper[4822]: I1201 07:18:49.331827 4822 scope.go:117] "RemoveContainer" containerID="16df184a4326cf5ba031b16cfb28020c3343765b30ee9bb4b3f4e9bb2c017914" Dec 01 07:18:49 crc kubenswrapper[4822]: I1201 07:18:49.361028 4822 scope.go:117] "RemoveContainer" containerID="23ece03fa6aebdc1cf2aed8e717b65e8f2fd1b8e42bb8f6df11deb02c5eb932b" Dec 01 07:18:49 crc kubenswrapper[4822]: I1201 07:18:49.391250 4822 scope.go:117] "RemoveContainer" containerID="dacd4a868e2652b5235b0281e3dd912a7ec2077150746b150606b21da568746e" Dec 01 07:19:01 crc kubenswrapper[4822]: I1201 07:19:01.951338 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:19:01 crc 
kubenswrapper[4822]: E1201 07:19:01.952469 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:19:12 crc kubenswrapper[4822]: I1201 07:19:12.951465 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:19:12 crc kubenswrapper[4822]: E1201 07:19:12.952742 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:19:26 crc kubenswrapper[4822]: I1201 07:19:26.951407 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:19:26 crc kubenswrapper[4822]: E1201 07:19:26.952649 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:19:39 crc kubenswrapper[4822]: I1201 07:19:39.951059 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:19:39 crc kubenswrapper[4822]: E1201 07:19:39.951910 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:19:49 crc kubenswrapper[4822]: I1201 07:19:49.611136 4822 scope.go:117] "RemoveContainer" containerID="b8722356572623cc5fc34be9c971b4dcd146825fe883537693297932f5b2745f" Dec 01 07:19:49 crc kubenswrapper[4822]: I1201 07:19:49.675352 4822 scope.go:117] "RemoveContainer" containerID="18522af8c645a747d69c9facd364e98dd7884610f65cb4041ab9af854278bd63" Dec 01 07:19:50 crc kubenswrapper[4822]: I1201 07:19:50.951070 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:19:50 crc kubenswrapper[4822]: E1201 07:19:50.951341 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:20:02 crc 
kubenswrapper[4822]: I1201 07:20:02.951104 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:20:02 crc kubenswrapper[4822]: E1201 07:20:02.952072 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:20:15 crc kubenswrapper[4822]: I1201 07:20:15.951608 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:20:15 crc kubenswrapper[4822]: E1201 07:20:15.952580 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:20:28 crc kubenswrapper[4822]: I1201 07:20:28.950514 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:20:28 crc kubenswrapper[4822]: E1201 07:20:28.951401 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:20:42 crc kubenswrapper[4822]: I1201 07:20:42.956797 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:20:42 crc kubenswrapper[4822]: E1201 07:20:42.958060 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:20:54 crc kubenswrapper[4822]: I1201 07:20:54.961045 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:20:54 crc kubenswrapper[4822]: E1201 07:20:54.962129 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:21:09 crc kubenswrapper[4822]: I1201 07:21:09.951164 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:21:09 crc 
kubenswrapper[4822]: E1201 07:21:09.951937 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:21:24 crc kubenswrapper[4822]: I1201 07:21:24.959748 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:21:25 crc kubenswrapper[4822]: I1201 07:21:25.964492 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"cbf6396dd64d30b51de3fa85532678906de8c3e947040cd4529cbaad8ebb533e"} Dec 01 07:21:49 crc kubenswrapper[4822]: I1201 07:21:49.797609 4822 scope.go:117] "RemoveContainer" containerID="51b8d67ad80c132d1af12a855561278f56330bd6e03769d6b5fdc4c66129804f" Dec 01 07:21:49 crc kubenswrapper[4822]: I1201 07:21:49.820387 4822 scope.go:117] "RemoveContainer" containerID="6b2321968780485e0490e50ec7771365e998c149930db8256f329ca62769e973" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.600117 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2gd6t"] Dec 01 07:22:23 crc kubenswrapper[4822]: E1201 07:22:23.601214 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c1a35cc-ec69-4551-9158-f10daad5905f" containerName="extract-content" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.601236 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c1a35cc-ec69-4551-9158-f10daad5905f" containerName="extract-content" Dec 01 07:22:23 crc kubenswrapper[4822]: E1201 07:22:23.601263 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c1a35cc-ec69-4551-9158-f10daad5905f" containerName="extract-utilities" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.601278 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c1a35cc-ec69-4551-9158-f10daad5905f" containerName="extract-utilities" Dec 01 07:22:23 crc kubenswrapper[4822]: E1201 07:22:23.601299 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c1a35cc-ec69-4551-9158-f10daad5905f" containerName="registry-server" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.601313 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c1a35cc-ec69-4551-9158-f10daad5905f" containerName="registry-server" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.601618 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c1a35cc-ec69-4551-9158-f10daad5905f" containerName="registry-server" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.603691 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.616138 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2gd6t"] Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.664756 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-utilities\") pod \"community-operators-2gd6t\" (UID: \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\") " pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.665292 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-catalog-content\") pod \"community-operators-2gd6t\" (UID: \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\") " pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.665412 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5tnl\" (UniqueName: \"kubernetes.io/projected/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-kube-api-access-t5tnl\") pod \"community-operators-2gd6t\" (UID: \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\") " pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.766698 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-catalog-content\") pod \"community-operators-2gd6t\" (UID: \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\") " pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.766769 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5tnl\" (UniqueName: \"kubernetes.io/projected/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-kube-api-access-t5tnl\") pod \"community-operators-2gd6t\" (UID: \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\") " pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.766908 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-utilities\") pod \"community-operators-2gd6t\" (UID: \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\") " pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.768279 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-utilities\") pod \"community-operators-2gd6t\" (UID: \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\") " pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.768384 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-catalog-content\") pod \"community-operators-2gd6t\" (UID: \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\") " pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.796246 4822 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-t5tnl\" (UniqueName: \"kubernetes.io/projected/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-kube-api-access-t5tnl\") pod \"community-operators-2gd6t\" (UID: \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\") " pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:23 crc kubenswrapper[4822]: I1201 07:22:23.939580 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:24 crc kubenswrapper[4822]: I1201 07:22:24.459529 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2gd6t"] Dec 01 07:22:24 crc kubenswrapper[4822]: I1201 07:22:24.558808 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gd6t" event={"ID":"6f585b18-b4cb-4a57-bf0b-a9b836600dd3","Type":"ContainerStarted","Data":"d3a8d0c75b6a592cb3de2164e77556f1700149c2ca48372aed03b8810714cc06"} Dec 01 07:22:25 crc kubenswrapper[4822]: I1201 07:22:25.571254 4822 generic.go:334] "Generic (PLEG): container finished" podID="6f585b18-b4cb-4a57-bf0b-a9b836600dd3" containerID="ee0ad5e5c64601a896f42534c72a232fb5702204f99ffa07a5a3564356dec274" exitCode=0 Dec 01 07:22:25 crc kubenswrapper[4822]: I1201 07:22:25.571353 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gd6t" event={"ID":"6f585b18-b4cb-4a57-bf0b-a9b836600dd3","Type":"ContainerDied","Data":"ee0ad5e5c64601a896f42534c72a232fb5702204f99ffa07a5a3564356dec274"} Dec 01 07:22:25 crc kubenswrapper[4822]: I1201 07:22:25.574094 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.188404 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kkwll"] Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.190175 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.199425 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kkwll"] Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.209541 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7pv7\" (UniqueName: \"kubernetes.io/projected/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-kube-api-access-x7pv7\") pod \"redhat-marketplace-kkwll\" (UID: \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\") " pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.209800 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-utilities\") pod \"redhat-marketplace-kkwll\" (UID: \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\") " pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.209871 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-catalog-content\") pod \"redhat-marketplace-kkwll\" (UID: \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\") " pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.311224 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-catalog-content\") pod \"redhat-marketplace-kkwll\" (UID: \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\") " pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.311344 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7pv7\" (UniqueName: \"kubernetes.io/projected/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-kube-api-access-x7pv7\") pod \"redhat-marketplace-kkwll\" (UID: \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\") " pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.311380 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-utilities\") pod \"redhat-marketplace-kkwll\" (UID: \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\") " pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.311933 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-utilities\") pod \"redhat-marketplace-kkwll\" (UID: \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\") " pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.312191 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-catalog-content\") pod \"redhat-marketplace-kkwll\" (UID: \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\") " pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.331753 4822 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-x7pv7\" (UniqueName: \"kubernetes.io/projected/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-kube-api-access-x7pv7\") pod \"redhat-marketplace-kkwll\" (UID: \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\") " pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.531662 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.580014 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gd6t" event={"ID":"6f585b18-b4cb-4a57-bf0b-a9b836600dd3","Type":"ContainerStarted","Data":"9cf3424fade815492cd402351ece7f754db9cef901eaadaa7a73422685081d27"} Dec 01 07:22:26 crc kubenswrapper[4822]: I1201 07:22:26.996067 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kkwll"] Dec 01 07:22:26 crc kubenswrapper[4822]: W1201 07:22:26.998464 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d60c2bd_5d5e_44b5_a5f8_31e616718b72.slice/crio-b8d95bde1dc3e43f65f8d433722b15e027fb38b37d92351f209b06709bb1e52f WatchSource:0}: Error finding container b8d95bde1dc3e43f65f8d433722b15e027fb38b37d92351f209b06709bb1e52f: Status 404 returned error can't find the container with id b8d95bde1dc3e43f65f8d433722b15e027fb38b37d92351f209b06709bb1e52f Dec 01 07:22:27 crc kubenswrapper[4822]: I1201 07:22:27.592283 4822 generic.go:334] "Generic (PLEG): container finished" podID="8d60c2bd-5d5e-44b5-a5f8-31e616718b72" containerID="a21079c9c68041fb01f1e4a025c04c36fa7141e3081d8522eb572971d6acbe2a" exitCode=0 Dec 01 07:22:27 crc kubenswrapper[4822]: I1201 07:22:27.592338 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkwll" event={"ID":"8d60c2bd-5d5e-44b5-a5f8-31e616718b72","Type":"ContainerDied","Data":"a21079c9c68041fb01f1e4a025c04c36fa7141e3081d8522eb572971d6acbe2a"} Dec 01 07:22:27 crc kubenswrapper[4822]: I1201 07:22:27.592388 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkwll" event={"ID":"8d60c2bd-5d5e-44b5-a5f8-31e616718b72","Type":"ContainerStarted","Data":"b8d95bde1dc3e43f65f8d433722b15e027fb38b37d92351f209b06709bb1e52f"} Dec 01 07:22:27 crc kubenswrapper[4822]: I1201 07:22:27.598429 4822 generic.go:334] "Generic (PLEG): container finished" podID="6f585b18-b4cb-4a57-bf0b-a9b836600dd3" containerID="9cf3424fade815492cd402351ece7f754db9cef901eaadaa7a73422685081d27" exitCode=0 Dec 01 07:22:27 crc kubenswrapper[4822]: I1201 07:22:27.598481 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gd6t" event={"ID":"6f585b18-b4cb-4a57-bf0b-a9b836600dd3","Type":"ContainerDied","Data":"9cf3424fade815492cd402351ece7f754db9cef901eaadaa7a73422685081d27"} Dec 01 07:22:28 crc kubenswrapper[4822]: I1201 07:22:28.610908 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gd6t" event={"ID":"6f585b18-b4cb-4a57-bf0b-a9b836600dd3","Type":"ContainerStarted","Data":"00a36555ea7d50223b66572a87ef4e04e015014dbd5a06bec6c1e1d84f18060d"} Dec 01 07:22:28 crc kubenswrapper[4822]: I1201 07:22:28.633686 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2gd6t" podStartSLOduration=3.165589849 
podStartE2EDuration="5.633667952s" podCreationTimestamp="2025-12-01 07:22:23 +0000 UTC" firstStartedPulling="2025-12-01 07:22:25.573824968 +0000 UTC m=+1900.894632644" lastFinishedPulling="2025-12-01 07:22:28.041903021 +0000 UTC m=+1903.362710747" observedRunningTime="2025-12-01 07:22:28.631597164 +0000 UTC m=+1903.952404850" watchObservedRunningTime="2025-12-01 07:22:28.633667952 +0000 UTC m=+1903.954475638" Dec 01 07:22:29 crc kubenswrapper[4822]: I1201 07:22:29.628145 4822 generic.go:334] "Generic (PLEG): container finished" podID="8d60c2bd-5d5e-44b5-a5f8-31e616718b72" containerID="ea7a5e4587901049a007628ab7e5f6d6754711af51feb42ba9908ceaf833fa70" exitCode=0 Dec 01 07:22:29 crc kubenswrapper[4822]: I1201 07:22:29.628323 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkwll" event={"ID":"8d60c2bd-5d5e-44b5-a5f8-31e616718b72","Type":"ContainerDied","Data":"ea7a5e4587901049a007628ab7e5f6d6754711af51feb42ba9908ceaf833fa70"} Dec 01 07:22:30 crc kubenswrapper[4822]: I1201 07:22:30.636211 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkwll" event={"ID":"8d60c2bd-5d5e-44b5-a5f8-31e616718b72","Type":"ContainerStarted","Data":"ca9e95d8b042485eed321f042746f489d31c04546e4f81340273bd417dfb598f"} Dec 01 07:22:30 crc kubenswrapper[4822]: I1201 07:22:30.656505 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kkwll" podStartSLOduration=2.060364152 podStartE2EDuration="4.656484909s" podCreationTimestamp="2025-12-01 07:22:26 +0000 UTC" firstStartedPulling="2025-12-01 07:22:27.595351938 +0000 UTC m=+1902.916159654" lastFinishedPulling="2025-12-01 07:22:30.191472715 +0000 UTC m=+1905.512280411" observedRunningTime="2025-12-01 07:22:30.650236393 +0000 UTC m=+1905.971044089" watchObservedRunningTime="2025-12-01 07:22:30.656484909 +0000 UTC m=+1905.977292595" Dec 01 07:22:33 crc kubenswrapper[4822]: I1201 07:22:33.941149 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:33 crc kubenswrapper[4822]: I1201 07:22:33.941547 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:34 crc kubenswrapper[4822]: I1201 07:22:34.006045 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:34 crc kubenswrapper[4822]: I1201 07:22:34.706700 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:36 crc kubenswrapper[4822]: I1201 07:22:36.532278 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:36 crc kubenswrapper[4822]: I1201 07:22:36.532673 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:36 crc kubenswrapper[4822]: I1201 07:22:36.589100 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:36 crc kubenswrapper[4822]: I1201 07:22:36.762828 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:37 crc kubenswrapper[4822]: I1201 07:22:37.774104 4822 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2gd6t"] Dec 01 07:22:37 crc kubenswrapper[4822]: I1201 07:22:37.774319 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2gd6t" podUID="6f585b18-b4cb-4a57-bf0b-a9b836600dd3" containerName="registry-server" containerID="cri-o://00a36555ea7d50223b66572a87ef4e04e015014dbd5a06bec6c1e1d84f18060d" gracePeriod=2 Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.704273 4822 generic.go:334] "Generic (PLEG): container finished" podID="6f585b18-b4cb-4a57-bf0b-a9b836600dd3" containerID="00a36555ea7d50223b66572a87ef4e04e015014dbd5a06bec6c1e1d84f18060d" exitCode=0 Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.705247 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gd6t" event={"ID":"6f585b18-b4cb-4a57-bf0b-a9b836600dd3","Type":"ContainerDied","Data":"00a36555ea7d50223b66572a87ef4e04e015014dbd5a06bec6c1e1d84f18060d"} Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.705278 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gd6t" event={"ID":"6f585b18-b4cb-4a57-bf0b-a9b836600dd3","Type":"ContainerDied","Data":"d3a8d0c75b6a592cb3de2164e77556f1700149c2ca48372aed03b8810714cc06"} Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.705289 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3a8d0c75b6a592cb3de2164e77556f1700149c2ca48372aed03b8810714cc06" Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.731410 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.806138 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-utilities\") pod \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\" (UID: \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\") " Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.806221 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5tnl\" (UniqueName: \"kubernetes.io/projected/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-kube-api-access-t5tnl\") pod \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\" (UID: \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\") " Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.806370 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-catalog-content\") pod \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\" (UID: \"6f585b18-b4cb-4a57-bf0b-a9b836600dd3\") " Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.807278 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-utilities" (OuterVolumeSpecName: "utilities") pod "6f585b18-b4cb-4a57-bf0b-a9b836600dd3" (UID: "6f585b18-b4cb-4a57-bf0b-a9b836600dd3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.813678 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-kube-api-access-t5tnl" (OuterVolumeSpecName: "kube-api-access-t5tnl") pod "6f585b18-b4cb-4a57-bf0b-a9b836600dd3" (UID: "6f585b18-b4cb-4a57-bf0b-a9b836600dd3"). InnerVolumeSpecName "kube-api-access-t5tnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.878506 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6f585b18-b4cb-4a57-bf0b-a9b836600dd3" (UID: "6f585b18-b4cb-4a57-bf0b-a9b836600dd3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.908128 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.908170 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:22:38 crc kubenswrapper[4822]: I1201 07:22:38.908187 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5tnl\" (UniqueName: \"kubernetes.io/projected/6f585b18-b4cb-4a57-bf0b-a9b836600dd3-kube-api-access-t5tnl\") on node \"crc\" DevicePath \"\"" Dec 01 07:22:39 crc kubenswrapper[4822]: I1201 07:22:39.718622 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2gd6t" Dec 01 07:22:39 crc kubenswrapper[4822]: I1201 07:22:39.757850 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2gd6t"] Dec 01 07:22:39 crc kubenswrapper[4822]: I1201 07:22:39.764707 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2gd6t"] Dec 01 07:22:40 crc kubenswrapper[4822]: I1201 07:22:40.963797 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f585b18-b4cb-4a57-bf0b-a9b836600dd3" path="/var/lib/kubelet/pods/6f585b18-b4cb-4a57-bf0b-a9b836600dd3/volumes" Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.370200 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kkwll"] Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.370435 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kkwll" podUID="8d60c2bd-5d5e-44b5-a5f8-31e616718b72" containerName="registry-server" containerID="cri-o://ca9e95d8b042485eed321f042746f489d31c04546e4f81340273bd417dfb598f" gracePeriod=2 Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.734734 4822 generic.go:334] "Generic (PLEG): container finished" podID="8d60c2bd-5d5e-44b5-a5f8-31e616718b72" containerID="ca9e95d8b042485eed321f042746f489d31c04546e4f81340273bd417dfb598f" exitCode=0 Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.734802 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkwll" event={"ID":"8d60c2bd-5d5e-44b5-a5f8-31e616718b72","Type":"ContainerDied","Data":"ca9e95d8b042485eed321f042746f489d31c04546e4f81340273bd417dfb598f"} Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.735069 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkwll" event={"ID":"8d60c2bd-5d5e-44b5-a5f8-31e616718b72","Type":"ContainerDied","Data":"b8d95bde1dc3e43f65f8d433722b15e027fb38b37d92351f209b06709bb1e52f"} Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.735082 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8d95bde1dc3e43f65f8d433722b15e027fb38b37d92351f209b06709bb1e52f" Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.756484 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.850180 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7pv7\" (UniqueName: \"kubernetes.io/projected/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-kube-api-access-x7pv7\") pod \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\" (UID: \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\") " Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.850367 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-utilities\") pod \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\" (UID: \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\") " Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.850391 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-catalog-content\") pod \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\" (UID: \"8d60c2bd-5d5e-44b5-a5f8-31e616718b72\") " Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.851636 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-utilities" (OuterVolumeSpecName: "utilities") pod "8d60c2bd-5d5e-44b5-a5f8-31e616718b72" (UID: "8d60c2bd-5d5e-44b5-a5f8-31e616718b72"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.855723 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-kube-api-access-x7pv7" (OuterVolumeSpecName: "kube-api-access-x7pv7") pod "8d60c2bd-5d5e-44b5-a5f8-31e616718b72" (UID: "8d60c2bd-5d5e-44b5-a5f8-31e616718b72"). InnerVolumeSpecName "kube-api-access-x7pv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.874574 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8d60c2bd-5d5e-44b5-a5f8-31e616718b72" (UID: "8d60c2bd-5d5e-44b5-a5f8-31e616718b72"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.954089 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7pv7\" (UniqueName: \"kubernetes.io/projected/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-kube-api-access-x7pv7\") on node \"crc\" DevicePath \"\"" Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.954124 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:22:41 crc kubenswrapper[4822]: I1201 07:22:41.954137 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d60c2bd-5d5e-44b5-a5f8-31e616718b72-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:22:42 crc kubenswrapper[4822]: I1201 07:22:42.742853 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kkwll" Dec 01 07:22:42 crc kubenswrapper[4822]: I1201 07:22:42.786333 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kkwll"] Dec 01 07:22:42 crc kubenswrapper[4822]: I1201 07:22:42.798514 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kkwll"] Dec 01 07:22:42 crc kubenswrapper[4822]: I1201 07:22:42.970733 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d60c2bd-5d5e-44b5-a5f8-31e616718b72" path="/var/lib/kubelet/pods/8d60c2bd-5d5e-44b5-a5f8-31e616718b72/volumes" Dec 01 07:23:42 crc kubenswrapper[4822]: I1201 07:23:42.542877 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:23:42 crc kubenswrapper[4822]: I1201 07:23:42.543831 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:24:12 crc kubenswrapper[4822]: I1201 07:24:12.542410 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:24:12 crc kubenswrapper[4822]: I1201 07:24:12.543284 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.382742 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jvppx"] Dec 01 07:24:16 crc kubenswrapper[4822]: E1201 07:24:16.383654 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d60c2bd-5d5e-44b5-a5f8-31e616718b72" containerName="registry-server" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.383673 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d60c2bd-5d5e-44b5-a5f8-31e616718b72" containerName="registry-server" Dec 01 07:24:16 crc kubenswrapper[4822]: E1201 07:24:16.383695 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d60c2bd-5d5e-44b5-a5f8-31e616718b72" containerName="extract-utilities" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.383704 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d60c2bd-5d5e-44b5-a5f8-31e616718b72" containerName="extract-utilities" Dec 01 07:24:16 crc kubenswrapper[4822]: E1201 07:24:16.383719 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f585b18-b4cb-4a57-bf0b-a9b836600dd3" containerName="extract-utilities" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.383727 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f585b18-b4cb-4a57-bf0b-a9b836600dd3" containerName="extract-utilities" Dec 01 07:24:16 crc 
kubenswrapper[4822]: E1201 07:24:16.383748 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f585b18-b4cb-4a57-bf0b-a9b836600dd3" containerName="registry-server" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.383755 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f585b18-b4cb-4a57-bf0b-a9b836600dd3" containerName="registry-server" Dec 01 07:24:16 crc kubenswrapper[4822]: E1201 07:24:16.383776 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f585b18-b4cb-4a57-bf0b-a9b836600dd3" containerName="extract-content" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.383783 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f585b18-b4cb-4a57-bf0b-a9b836600dd3" containerName="extract-content" Dec 01 07:24:16 crc kubenswrapper[4822]: E1201 07:24:16.383795 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d60c2bd-5d5e-44b5-a5f8-31e616718b72" containerName="extract-content" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.383802 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d60c2bd-5d5e-44b5-a5f8-31e616718b72" containerName="extract-content" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.383976 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f585b18-b4cb-4a57-bf0b-a9b836600dd3" containerName="registry-server" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.383996 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d60c2bd-5d5e-44b5-a5f8-31e616718b72" containerName="registry-server" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.385235 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.387998 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a07bfbd3-a630-4303-ab30-43a23418123a-catalog-content\") pod \"redhat-operators-jvppx\" (UID: \"a07bfbd3-a630-4303-ab30-43a23418123a\") " pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.388097 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a07bfbd3-a630-4303-ab30-43a23418123a-utilities\") pod \"redhat-operators-jvppx\" (UID: \"a07bfbd3-a630-4303-ab30-43a23418123a\") " pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.388136 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65svj\" (UniqueName: \"kubernetes.io/projected/a07bfbd3-a630-4303-ab30-43a23418123a-kube-api-access-65svj\") pod \"redhat-operators-jvppx\" (UID: \"a07bfbd3-a630-4303-ab30-43a23418123a\") " pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.409301 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jvppx"] Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.489270 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a07bfbd3-a630-4303-ab30-43a23418123a-utilities\") pod \"redhat-operators-jvppx\" (UID: \"a07bfbd3-a630-4303-ab30-43a23418123a\") " pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 
07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.489343 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65svj\" (UniqueName: \"kubernetes.io/projected/a07bfbd3-a630-4303-ab30-43a23418123a-kube-api-access-65svj\") pod \"redhat-operators-jvppx\" (UID: \"a07bfbd3-a630-4303-ab30-43a23418123a\") " pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.489448 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a07bfbd3-a630-4303-ab30-43a23418123a-catalog-content\") pod \"redhat-operators-jvppx\" (UID: \"a07bfbd3-a630-4303-ab30-43a23418123a\") " pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.490035 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a07bfbd3-a630-4303-ab30-43a23418123a-catalog-content\") pod \"redhat-operators-jvppx\" (UID: \"a07bfbd3-a630-4303-ab30-43a23418123a\") " pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.490170 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a07bfbd3-a630-4303-ab30-43a23418123a-utilities\") pod \"redhat-operators-jvppx\" (UID: \"a07bfbd3-a630-4303-ab30-43a23418123a\") " pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.512925 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65svj\" (UniqueName: \"kubernetes.io/projected/a07bfbd3-a630-4303-ab30-43a23418123a-kube-api-access-65svj\") pod \"redhat-operators-jvppx\" (UID: \"a07bfbd3-a630-4303-ab30-43a23418123a\") " pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:16 crc kubenswrapper[4822]: I1201 07:24:16.718157 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:17 crc kubenswrapper[4822]: I1201 07:24:17.155476 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jvppx"] Dec 01 07:24:17 crc kubenswrapper[4822]: I1201 07:24:17.716815 4822 generic.go:334] "Generic (PLEG): container finished" podID="a07bfbd3-a630-4303-ab30-43a23418123a" containerID="cb6298f3541b335e89edcd1f5fd4a919363624aa5860e9a663f26340db7f457e" exitCode=0 Dec 01 07:24:17 crc kubenswrapper[4822]: I1201 07:24:17.716935 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvppx" event={"ID":"a07bfbd3-a630-4303-ab30-43a23418123a","Type":"ContainerDied","Data":"cb6298f3541b335e89edcd1f5fd4a919363624aa5860e9a663f26340db7f457e"} Dec 01 07:24:17 crc kubenswrapper[4822]: I1201 07:24:17.717161 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvppx" event={"ID":"a07bfbd3-a630-4303-ab30-43a23418123a","Type":"ContainerStarted","Data":"64453e7aabac7a366b46765828822e7717682e612e5deefdf232796cd8ddf757"} Dec 01 07:24:18 crc kubenswrapper[4822]: I1201 07:24:18.725015 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvppx" event={"ID":"a07bfbd3-a630-4303-ab30-43a23418123a","Type":"ContainerStarted","Data":"d4c6263a14813855a882230a9376b2f5434e0138b3dd30574e3d9038a9509e7c"} Dec 01 07:24:19 crc kubenswrapper[4822]: I1201 07:24:19.741128 4822 generic.go:334] "Generic (PLEG): container finished" podID="a07bfbd3-a630-4303-ab30-43a23418123a" containerID="d4c6263a14813855a882230a9376b2f5434e0138b3dd30574e3d9038a9509e7c" exitCode=0 Dec 01 07:24:19 crc kubenswrapper[4822]: I1201 07:24:19.741192 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvppx" event={"ID":"a07bfbd3-a630-4303-ab30-43a23418123a","Type":"ContainerDied","Data":"d4c6263a14813855a882230a9376b2f5434e0138b3dd30574e3d9038a9509e7c"} Dec 01 07:24:20 crc kubenswrapper[4822]: I1201 07:24:20.752915 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvppx" event={"ID":"a07bfbd3-a630-4303-ab30-43a23418123a","Type":"ContainerStarted","Data":"972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988"} Dec 01 07:24:20 crc kubenswrapper[4822]: I1201 07:24:20.775220 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jvppx" podStartSLOduration=2.10227724 podStartE2EDuration="4.775196669s" podCreationTimestamp="2025-12-01 07:24:16 +0000 UTC" firstStartedPulling="2025-12-01 07:24:17.718825222 +0000 UTC m=+2013.039632918" lastFinishedPulling="2025-12-01 07:24:20.391744641 +0000 UTC m=+2015.712552347" observedRunningTime="2025-12-01 07:24:20.772159824 +0000 UTC m=+2016.092967520" watchObservedRunningTime="2025-12-01 07:24:20.775196669 +0000 UTC m=+2016.096004365" Dec 01 07:24:26 crc kubenswrapper[4822]: I1201 07:24:26.719844 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:26 crc kubenswrapper[4822]: I1201 07:24:26.721883 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:27 crc kubenswrapper[4822]: I1201 07:24:27.803579 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jvppx" 
podUID="a07bfbd3-a630-4303-ab30-43a23418123a" containerName="registry-server" probeResult="failure" output=< Dec 01 07:24:27 crc kubenswrapper[4822]: timeout: failed to connect service ":50051" within 1s Dec 01 07:24:27 crc kubenswrapper[4822]: > Dec 01 07:24:36 crc kubenswrapper[4822]: I1201 07:24:36.856999 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:36 crc kubenswrapper[4822]: I1201 07:24:36.958944 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:37 crc kubenswrapper[4822]: I1201 07:24:37.109944 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jvppx"] Dec 01 07:24:37 crc kubenswrapper[4822]: I1201 07:24:37.897216 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jvppx" podUID="a07bfbd3-a630-4303-ab30-43a23418123a" containerName="registry-server" containerID="cri-o://972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988" gracePeriod=2 Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.362059 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.538935 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a07bfbd3-a630-4303-ab30-43a23418123a-utilities\") pod \"a07bfbd3-a630-4303-ab30-43a23418123a\" (UID: \"a07bfbd3-a630-4303-ab30-43a23418123a\") " Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.538997 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65svj\" (UniqueName: \"kubernetes.io/projected/a07bfbd3-a630-4303-ab30-43a23418123a-kube-api-access-65svj\") pod \"a07bfbd3-a630-4303-ab30-43a23418123a\" (UID: \"a07bfbd3-a630-4303-ab30-43a23418123a\") " Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.539064 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a07bfbd3-a630-4303-ab30-43a23418123a-catalog-content\") pod \"a07bfbd3-a630-4303-ab30-43a23418123a\" (UID: \"a07bfbd3-a630-4303-ab30-43a23418123a\") " Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.540521 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a07bfbd3-a630-4303-ab30-43a23418123a-utilities" (OuterVolumeSpecName: "utilities") pod "a07bfbd3-a630-4303-ab30-43a23418123a" (UID: "a07bfbd3-a630-4303-ab30-43a23418123a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.545451 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a07bfbd3-a630-4303-ab30-43a23418123a-kube-api-access-65svj" (OuterVolumeSpecName: "kube-api-access-65svj") pod "a07bfbd3-a630-4303-ab30-43a23418123a" (UID: "a07bfbd3-a630-4303-ab30-43a23418123a"). InnerVolumeSpecName "kube-api-access-65svj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.641423 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a07bfbd3-a630-4303-ab30-43a23418123a-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.641495 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65svj\" (UniqueName: \"kubernetes.io/projected/a07bfbd3-a630-4303-ab30-43a23418123a-kube-api-access-65svj\") on node \"crc\" DevicePath \"\"" Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.712268 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a07bfbd3-a630-4303-ab30-43a23418123a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a07bfbd3-a630-4303-ab30-43a23418123a" (UID: "a07bfbd3-a630-4303-ab30-43a23418123a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.743225 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a07bfbd3-a630-4303-ab30-43a23418123a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.912477 4822 generic.go:334] "Generic (PLEG): container finished" podID="a07bfbd3-a630-4303-ab30-43a23418123a" containerID="972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988" exitCode=0 Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.912535 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvppx" event={"ID":"a07bfbd3-a630-4303-ab30-43a23418123a","Type":"ContainerDied","Data":"972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988"} Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.912636 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jvppx" Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.912683 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvppx" event={"ID":"a07bfbd3-a630-4303-ab30-43a23418123a","Type":"ContainerDied","Data":"64453e7aabac7a366b46765828822e7717682e612e5deefdf232796cd8ddf757"} Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.912729 4822 scope.go:117] "RemoveContainer" containerID="972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988" Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.970086 4822 scope.go:117] "RemoveContainer" containerID="d4c6263a14813855a882230a9376b2f5434e0138b3dd30574e3d9038a9509e7c" Dec 01 07:24:38 crc kubenswrapper[4822]: I1201 07:24:38.996846 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jvppx"] Dec 01 07:24:39 crc kubenswrapper[4822]: I1201 07:24:39.007719 4822 scope.go:117] "RemoveContainer" containerID="cb6298f3541b335e89edcd1f5fd4a919363624aa5860e9a663f26340db7f457e" Dec 01 07:24:39 crc kubenswrapper[4822]: I1201 07:24:39.013016 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jvppx"] Dec 01 07:24:39 crc kubenswrapper[4822]: I1201 07:24:39.027287 4822 scope.go:117] "RemoveContainer" containerID="972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988" Dec 01 07:24:39 crc kubenswrapper[4822]: E1201 07:24:39.027708 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988\": container with ID starting with 972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988 not found: ID does not exist" containerID="972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988" Dec 01 07:24:39 crc kubenswrapper[4822]: I1201 07:24:39.027746 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988"} err="failed to get container status \"972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988\": rpc error: code = NotFound desc = could not find container \"972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988\": container with ID starting with 972ca936ff544ea7a866300c64595af9bb4d3761ad92082ea33faa51ab6a8988 not found: ID does not exist" Dec 01 07:24:39 crc kubenswrapper[4822]: I1201 07:24:39.027774 4822 scope.go:117] "RemoveContainer" containerID="d4c6263a14813855a882230a9376b2f5434e0138b3dd30574e3d9038a9509e7c" Dec 01 07:24:39 crc kubenswrapper[4822]: E1201 07:24:39.028371 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4c6263a14813855a882230a9376b2f5434e0138b3dd30574e3d9038a9509e7c\": container with ID starting with d4c6263a14813855a882230a9376b2f5434e0138b3dd30574e3d9038a9509e7c not found: ID does not exist" containerID="d4c6263a14813855a882230a9376b2f5434e0138b3dd30574e3d9038a9509e7c" Dec 01 07:24:39 crc kubenswrapper[4822]: I1201 07:24:39.028422 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4c6263a14813855a882230a9376b2f5434e0138b3dd30574e3d9038a9509e7c"} err="failed to get container status \"d4c6263a14813855a882230a9376b2f5434e0138b3dd30574e3d9038a9509e7c\": rpc error: code = NotFound desc = could not find container 
\"d4c6263a14813855a882230a9376b2f5434e0138b3dd30574e3d9038a9509e7c\": container with ID starting with d4c6263a14813855a882230a9376b2f5434e0138b3dd30574e3d9038a9509e7c not found: ID does not exist" Dec 01 07:24:39 crc kubenswrapper[4822]: I1201 07:24:39.028454 4822 scope.go:117] "RemoveContainer" containerID="cb6298f3541b335e89edcd1f5fd4a919363624aa5860e9a663f26340db7f457e" Dec 01 07:24:39 crc kubenswrapper[4822]: E1201 07:24:39.028874 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb6298f3541b335e89edcd1f5fd4a919363624aa5860e9a663f26340db7f457e\": container with ID starting with cb6298f3541b335e89edcd1f5fd4a919363624aa5860e9a663f26340db7f457e not found: ID does not exist" containerID="cb6298f3541b335e89edcd1f5fd4a919363624aa5860e9a663f26340db7f457e" Dec 01 07:24:39 crc kubenswrapper[4822]: I1201 07:24:39.028903 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb6298f3541b335e89edcd1f5fd4a919363624aa5860e9a663f26340db7f457e"} err="failed to get container status \"cb6298f3541b335e89edcd1f5fd4a919363624aa5860e9a663f26340db7f457e\": rpc error: code = NotFound desc = could not find container \"cb6298f3541b335e89edcd1f5fd4a919363624aa5860e9a663f26340db7f457e\": container with ID starting with cb6298f3541b335e89edcd1f5fd4a919363624aa5860e9a663f26340db7f457e not found: ID does not exist" Dec 01 07:24:40 crc kubenswrapper[4822]: I1201 07:24:40.961345 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a07bfbd3-a630-4303-ab30-43a23418123a" path="/var/lib/kubelet/pods/a07bfbd3-a630-4303-ab30-43a23418123a/volumes" Dec 01 07:24:42 crc kubenswrapper[4822]: I1201 07:24:42.542784 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:24:42 crc kubenswrapper[4822]: I1201 07:24:42.542877 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:24:42 crc kubenswrapper[4822]: I1201 07:24:42.542943 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 07:24:42 crc kubenswrapper[4822]: I1201 07:24:42.543850 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cbf6396dd64d30b51de3fa85532678906de8c3e947040cd4529cbaad8ebb533e"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:24:42 crc kubenswrapper[4822]: I1201 07:24:42.543941 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://cbf6396dd64d30b51de3fa85532678906de8c3e947040cd4529cbaad8ebb533e" gracePeriod=600 Dec 01 07:24:42 crc kubenswrapper[4822]: I1201 07:24:42.946609 4822 generic.go:334] 
"Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="cbf6396dd64d30b51de3fa85532678906de8c3e947040cd4529cbaad8ebb533e" exitCode=0 Dec 01 07:24:42 crc kubenswrapper[4822]: I1201 07:24:42.946661 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"cbf6396dd64d30b51de3fa85532678906de8c3e947040cd4529cbaad8ebb533e"} Dec 01 07:24:42 crc kubenswrapper[4822]: I1201 07:24:42.947008 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79"} Dec 01 07:24:42 crc kubenswrapper[4822]: I1201 07:24:42.947049 4822 scope.go:117] "RemoveContainer" containerID="96ae9d2ad63cf3cc2210320bc1653dce049f9598f13beaec883faa407b5ea272" Dec 01 07:26:42 crc kubenswrapper[4822]: I1201 07:26:42.542831 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:26:42 crc kubenswrapper[4822]: I1201 07:26:42.543600 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:27:12 crc kubenswrapper[4822]: I1201 07:27:12.543007 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:27:12 crc kubenswrapper[4822]: I1201 07:27:12.543673 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:27:42 crc kubenswrapper[4822]: I1201 07:27:42.542848 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:27:42 crc kubenswrapper[4822]: I1201 07:27:42.543373 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:27:42 crc kubenswrapper[4822]: I1201 07:27:42.543421 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 07:27:42 crc kubenswrapper[4822]: I1201 07:27:42.544024 4822 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:27:42 crc kubenswrapper[4822]: I1201 07:27:42.544149 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" gracePeriod=600 Dec 01 07:27:42 crc kubenswrapper[4822]: I1201 07:27:42.684578 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" exitCode=0 Dec 01 07:27:42 crc kubenswrapper[4822]: I1201 07:27:42.684613 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79"} Dec 01 07:27:42 crc kubenswrapper[4822]: I1201 07:27:42.684688 4822 scope.go:117] "RemoveContainer" containerID="cbf6396dd64d30b51de3fa85532678906de8c3e947040cd4529cbaad8ebb533e" Dec 01 07:27:42 crc kubenswrapper[4822]: E1201 07:27:42.687124 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:27:43 crc kubenswrapper[4822]: I1201 07:27:43.700417 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:27:43 crc kubenswrapper[4822]: E1201 07:27:43.700914 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:27:55 crc kubenswrapper[4822]: I1201 07:27:55.950265 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:27:55 crc kubenswrapper[4822]: E1201 07:27:55.951200 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:28:06 crc kubenswrapper[4822]: I1201 07:28:06.951241 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 
07:28:06 crc kubenswrapper[4822]: E1201 07:28:06.952197 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:28:20 crc kubenswrapper[4822]: I1201 07:28:20.950993 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:28:20 crc kubenswrapper[4822]: E1201 07:28:20.951851 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:28:32 crc kubenswrapper[4822]: I1201 07:28:32.951064 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:28:32 crc kubenswrapper[4822]: E1201 07:28:32.951775 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:28:43 crc kubenswrapper[4822]: I1201 07:28:43.951347 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:28:43 crc kubenswrapper[4822]: E1201 07:28:43.953363 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.690583 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5c4xv"] Dec 01 07:28:46 crc kubenswrapper[4822]: E1201 07:28:46.706100 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a07bfbd3-a630-4303-ab30-43a23418123a" containerName="extract-content" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.706144 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a07bfbd3-a630-4303-ab30-43a23418123a" containerName="extract-content" Dec 01 07:28:46 crc kubenswrapper[4822]: E1201 07:28:46.706175 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a07bfbd3-a630-4303-ab30-43a23418123a" containerName="registry-server" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.706194 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a07bfbd3-a630-4303-ab30-43a23418123a" containerName="registry-server" Dec 01 07:28:46 crc kubenswrapper[4822]: E1201 07:28:46.706286 4822 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="a07bfbd3-a630-4303-ab30-43a23418123a" containerName="extract-utilities" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.706304 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="a07bfbd3-a630-4303-ab30-43a23418123a" containerName="extract-utilities" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.706623 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="a07bfbd3-a630-4303-ab30-43a23418123a" containerName="registry-server" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.750000 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.775184 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5c4xv"] Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.849540 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-catalog-content\") pod \"certified-operators-5c4xv\" (UID: \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\") " pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.849748 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-utilities\") pod \"certified-operators-5c4xv\" (UID: \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\") " pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.849835 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8577j\" (UniqueName: \"kubernetes.io/projected/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-kube-api-access-8577j\") pod \"certified-operators-5c4xv\" (UID: \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\") " pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.951146 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-catalog-content\") pod \"certified-operators-5c4xv\" (UID: \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\") " pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.951211 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-utilities\") pod \"certified-operators-5c4xv\" (UID: \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\") " pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.951237 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8577j\" (UniqueName: \"kubernetes.io/projected/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-kube-api-access-8577j\") pod \"certified-operators-5c4xv\" (UID: \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\") " pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.952038 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-catalog-content\") pod \"certified-operators-5c4xv\" (UID: \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\") " pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.952270 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-utilities\") pod \"certified-operators-5c4xv\" (UID: \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\") " pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:46 crc kubenswrapper[4822]: I1201 07:28:46.979456 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8577j\" (UniqueName: \"kubernetes.io/projected/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-kube-api-access-8577j\") pod \"certified-operators-5c4xv\" (UID: \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\") " pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:47 crc kubenswrapper[4822]: I1201 07:28:47.086226 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:47 crc kubenswrapper[4822]: I1201 07:28:47.708607 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5c4xv"] Dec 01 07:28:47 crc kubenswrapper[4822]: W1201 07:28:47.714792 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41ec3f88_e78b_4bfe_b79f_3c8b4bc2346d.slice/crio-53bb851f7e1a9e43620a181323b7078021b87607c1d6ceb860a75907f511941e WatchSource:0}: Error finding container 53bb851f7e1a9e43620a181323b7078021b87607c1d6ceb860a75907f511941e: Status 404 returned error can't find the container with id 53bb851f7e1a9e43620a181323b7078021b87607c1d6ceb860a75907f511941e Dec 01 07:28:48 crc kubenswrapper[4822]: I1201 07:28:48.337630 4822 generic.go:334] "Generic (PLEG): container finished" podID="41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" containerID="78307c0eeb31d8e5da685e63c38f9a2ecd847a701ccd53f727892e981b6663ae" exitCode=0 Dec 01 07:28:48 crc kubenswrapper[4822]: I1201 07:28:48.337709 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5c4xv" event={"ID":"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d","Type":"ContainerDied","Data":"78307c0eeb31d8e5da685e63c38f9a2ecd847a701ccd53f727892e981b6663ae"} Dec 01 07:28:48 crc kubenswrapper[4822]: I1201 07:28:48.337759 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5c4xv" event={"ID":"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d","Type":"ContainerStarted","Data":"53bb851f7e1a9e43620a181323b7078021b87607c1d6ceb860a75907f511941e"} Dec 01 07:28:48 crc kubenswrapper[4822]: I1201 07:28:48.339973 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:28:50 crc kubenswrapper[4822]: I1201 07:28:50.012325 4822 scope.go:117] "RemoveContainer" containerID="9cf3424fade815492cd402351ece7f754db9cef901eaadaa7a73422685081d27" Dec 01 07:28:50 crc kubenswrapper[4822]: I1201 07:28:50.043711 4822 scope.go:117] "RemoveContainer" containerID="ea7a5e4587901049a007628ab7e5f6d6754711af51feb42ba9908ceaf833fa70" Dec 01 07:28:50 crc kubenswrapper[4822]: I1201 07:28:50.064355 4822 scope.go:117] "RemoveContainer" containerID="ca9e95d8b042485eed321f042746f489d31c04546e4f81340273bd417dfb598f" Dec 01 07:28:50 crc kubenswrapper[4822]: I1201 
07:28:50.105477 4822 scope.go:117] "RemoveContainer" containerID="00a36555ea7d50223b66572a87ef4e04e015014dbd5a06bec6c1e1d84f18060d" Dec 01 07:28:50 crc kubenswrapper[4822]: I1201 07:28:50.126811 4822 scope.go:117] "RemoveContainer" containerID="a21079c9c68041fb01f1e4a025c04c36fa7141e3081d8522eb572971d6acbe2a" Dec 01 07:28:50 crc kubenswrapper[4822]: I1201 07:28:50.151865 4822 scope.go:117] "RemoveContainer" containerID="ee0ad5e5c64601a896f42534c72a232fb5702204f99ffa07a5a3564356dec274" Dec 01 07:28:50 crc kubenswrapper[4822]: I1201 07:28:50.358389 4822 generic.go:334] "Generic (PLEG): container finished" podID="41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" containerID="1310a754a331cddd6d4980907ff1d251384a9f4b34fd85f1cb9314da3dc4c8db" exitCode=0 Dec 01 07:28:50 crc kubenswrapper[4822]: I1201 07:28:50.358469 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5c4xv" event={"ID":"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d","Type":"ContainerDied","Data":"1310a754a331cddd6d4980907ff1d251384a9f4b34fd85f1cb9314da3dc4c8db"} Dec 01 07:28:51 crc kubenswrapper[4822]: I1201 07:28:51.371463 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5c4xv" event={"ID":"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d","Type":"ContainerStarted","Data":"b20298a020e9cb74d8337c92c1d21dc8dbe89d12e79b340eb45106fa1fb75ac3"} Dec 01 07:28:51 crc kubenswrapper[4822]: I1201 07:28:51.399372 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5c4xv" podStartSLOduration=2.9352423 podStartE2EDuration="5.399348208s" podCreationTimestamp="2025-12-01 07:28:46 +0000 UTC" firstStartedPulling="2025-12-01 07:28:48.339619549 +0000 UTC m=+2283.660427245" lastFinishedPulling="2025-12-01 07:28:50.803725437 +0000 UTC m=+2286.124533153" observedRunningTime="2025-12-01 07:28:51.391607171 +0000 UTC m=+2286.712414937" watchObservedRunningTime="2025-12-01 07:28:51.399348208 +0000 UTC m=+2286.720155904" Dec 01 07:28:56 crc kubenswrapper[4822]: I1201 07:28:56.951113 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:28:56 crc kubenswrapper[4822]: E1201 07:28:56.952209 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:28:57 crc kubenswrapper[4822]: I1201 07:28:57.087521 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:57 crc kubenswrapper[4822]: I1201 07:28:57.087871 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:57 crc kubenswrapper[4822]: I1201 07:28:57.165097 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:57 crc kubenswrapper[4822]: I1201 07:28:57.507582 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:28:57 crc kubenswrapper[4822]: I1201 07:28:57.565926 4822 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-marketplace/certified-operators-5c4xv"] Dec 01 07:28:59 crc kubenswrapper[4822]: I1201 07:28:59.443154 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5c4xv" podUID="41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" containerName="registry-server" containerID="cri-o://b20298a020e9cb74d8337c92c1d21dc8dbe89d12e79b340eb45106fa1fb75ac3" gracePeriod=2 Dec 01 07:29:00 crc kubenswrapper[4822]: I1201 07:29:00.457342 4822 generic.go:334] "Generic (PLEG): container finished" podID="41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" containerID="b20298a020e9cb74d8337c92c1d21dc8dbe89d12e79b340eb45106fa1fb75ac3" exitCode=0 Dec 01 07:29:00 crc kubenswrapper[4822]: I1201 07:29:00.457424 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5c4xv" event={"ID":"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d","Type":"ContainerDied","Data":"b20298a020e9cb74d8337c92c1d21dc8dbe89d12e79b340eb45106fa1fb75ac3"} Dec 01 07:29:00 crc kubenswrapper[4822]: I1201 07:29:00.507875 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:29:00 crc kubenswrapper[4822]: I1201 07:29:00.616613 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-utilities\") pod \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\" (UID: \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\") " Dec 01 07:29:00 crc kubenswrapper[4822]: I1201 07:29:00.616681 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8577j\" (UniqueName: \"kubernetes.io/projected/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-kube-api-access-8577j\") pod \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\" (UID: \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\") " Dec 01 07:29:00 crc kubenswrapper[4822]: I1201 07:29:00.616711 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-catalog-content\") pod \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\" (UID: \"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d\") " Dec 01 07:29:00 crc kubenswrapper[4822]: I1201 07:29:00.617759 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-utilities" (OuterVolumeSpecName: "utilities") pod "41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" (UID: "41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:29:00 crc kubenswrapper[4822]: I1201 07:29:00.629499 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-kube-api-access-8577j" (OuterVolumeSpecName: "kube-api-access-8577j") pod "41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" (UID: "41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d"). InnerVolumeSpecName "kube-api-access-8577j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:29:00 crc kubenswrapper[4822]: I1201 07:29:00.699759 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" (UID: "41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:29:00 crc kubenswrapper[4822]: I1201 07:29:00.718537 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8577j\" (UniqueName: \"kubernetes.io/projected/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-kube-api-access-8577j\") on node \"crc\" DevicePath \"\"" Dec 01 07:29:00 crc kubenswrapper[4822]: I1201 07:29:00.718772 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:29:00 crc kubenswrapper[4822]: I1201 07:29:00.718831 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:29:01 crc kubenswrapper[4822]: I1201 07:29:01.468230 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5c4xv" event={"ID":"41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d","Type":"ContainerDied","Data":"53bb851f7e1a9e43620a181323b7078021b87607c1d6ceb860a75907f511941e"} Dec 01 07:29:01 crc kubenswrapper[4822]: I1201 07:29:01.468291 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5c4xv" Dec 01 07:29:01 crc kubenswrapper[4822]: I1201 07:29:01.468736 4822 scope.go:117] "RemoveContainer" containerID="b20298a020e9cb74d8337c92c1d21dc8dbe89d12e79b340eb45106fa1fb75ac3" Dec 01 07:29:01 crc kubenswrapper[4822]: I1201 07:29:01.487876 4822 scope.go:117] "RemoveContainer" containerID="1310a754a331cddd6d4980907ff1d251384a9f4b34fd85f1cb9314da3dc4c8db" Dec 01 07:29:01 crc kubenswrapper[4822]: I1201 07:29:01.499054 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5c4xv"] Dec 01 07:29:01 crc kubenswrapper[4822]: I1201 07:29:01.511835 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5c4xv"] Dec 01 07:29:01 crc kubenswrapper[4822]: I1201 07:29:01.520000 4822 scope.go:117] "RemoveContainer" containerID="78307c0eeb31d8e5da685e63c38f9a2ecd847a701ccd53f727892e981b6663ae" Dec 01 07:29:02 crc kubenswrapper[4822]: I1201 07:29:02.961506 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" path="/var/lib/kubelet/pods/41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d/volumes" Dec 01 07:29:10 crc kubenswrapper[4822]: I1201 07:29:10.950842 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:29:10 crc kubenswrapper[4822]: E1201 07:29:10.951576 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:29:23 crc kubenswrapper[4822]: I1201 07:29:23.950814 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:29:23 crc kubenswrapper[4822]: E1201 07:29:23.951919 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:29:37 crc kubenswrapper[4822]: I1201 07:29:37.951695 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:29:37 crc kubenswrapper[4822]: E1201 07:29:37.952848 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:29:49 crc kubenswrapper[4822]: I1201 07:29:49.952735 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:29:49 crc kubenswrapper[4822]: E1201 07:29:49.953645 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.172867 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb"] Dec 01 07:30:00 crc kubenswrapper[4822]: E1201 07:30:00.175168 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" containerName="extract-content" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.175340 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" containerName="extract-content" Dec 01 07:30:00 crc kubenswrapper[4822]: E1201 07:30:00.175492 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" containerName="extract-utilities" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.175625 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" containerName="extract-utilities" Dec 01 07:30:00 crc kubenswrapper[4822]: E1201 07:30:00.175738 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" containerName="registry-server" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.175841 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" containerName="registry-server" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.176179 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ec3f88-e78b-4bfe-b79f-3c8b4bc2346d" containerName="registry-server" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.177168 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.180938 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb"] Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.185979 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.186182 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.289247 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-config-volume\") pod \"collect-profiles-29409570-dcbjb\" (UID: \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.289320 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhd5s\" (UniqueName: \"kubernetes.io/projected/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-kube-api-access-dhd5s\") pod \"collect-profiles-29409570-dcbjb\" (UID: \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.289808 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-secret-volume\") pod \"collect-profiles-29409570-dcbjb\" (UID: \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.391640 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-secret-volume\") pod \"collect-profiles-29409570-dcbjb\" (UID: \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.391729 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-config-volume\") pod \"collect-profiles-29409570-dcbjb\" (UID: \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.391792 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhd5s\" (UniqueName: \"kubernetes.io/projected/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-kube-api-access-dhd5s\") pod \"collect-profiles-29409570-dcbjb\" (UID: \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.394602 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-config-volume\") pod 
\"collect-profiles-29409570-dcbjb\" (UID: \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.405938 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-secret-volume\") pod \"collect-profiles-29409570-dcbjb\" (UID: \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.408134 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhd5s\" (UniqueName: \"kubernetes.io/projected/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-kube-api-access-dhd5s\") pod \"collect-profiles-29409570-dcbjb\" (UID: \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:00 crc kubenswrapper[4822]: I1201 07:30:00.504701 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:01 crc kubenswrapper[4822]: I1201 07:30:01.003475 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb"] Dec 01 07:30:01 crc kubenswrapper[4822]: I1201 07:30:01.032125 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" event={"ID":"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce","Type":"ContainerStarted","Data":"05cf4af19bcebb4e1949ed32867f58b30d9c8dd452dda5d0fd7b4f816f65568e"} Dec 01 07:30:01 crc kubenswrapper[4822]: E1201 07:30:01.464965 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4ceca39_ee9e_4c6f_8d01_a4fecae0c9ce.slice/crio-conmon-94f016207beed4324ad054f7727fc37b96ba33888fa8a62008a36c251df8211b.scope\": RecentStats: unable to find data in memory cache]" Dec 01 07:30:02 crc kubenswrapper[4822]: I1201 07:30:02.045480 4822 generic.go:334] "Generic (PLEG): container finished" podID="d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce" containerID="94f016207beed4324ad054f7727fc37b96ba33888fa8a62008a36c251df8211b" exitCode=0 Dec 01 07:30:02 crc kubenswrapper[4822]: I1201 07:30:02.045532 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" event={"ID":"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce","Type":"ContainerDied","Data":"94f016207beed4324ad054f7727fc37b96ba33888fa8a62008a36c251df8211b"} Dec 01 07:30:03 crc kubenswrapper[4822]: I1201 07:30:03.412750 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:03 crc kubenswrapper[4822]: I1201 07:30:03.439356 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-config-volume\") pod \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\" (UID: \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\") " Dec 01 07:30:03 crc kubenswrapper[4822]: I1201 07:30:03.441168 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-config-volume" (OuterVolumeSpecName: "config-volume") pod "d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce" (UID: "d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:30:03 crc kubenswrapper[4822]: I1201 07:30:03.540744 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-secret-volume\") pod \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\" (UID: \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\") " Dec 01 07:30:03 crc kubenswrapper[4822]: I1201 07:30:03.540854 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhd5s\" (UniqueName: \"kubernetes.io/projected/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-kube-api-access-dhd5s\") pod \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\" (UID: \"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce\") " Dec 01 07:30:03 crc kubenswrapper[4822]: I1201 07:30:03.541318 4822 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:30:03 crc kubenswrapper[4822]: I1201 07:30:03.547135 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce" (UID: "d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:30:03 crc kubenswrapper[4822]: I1201 07:30:03.547256 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-kube-api-access-dhd5s" (OuterVolumeSpecName: "kube-api-access-dhd5s") pod "d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce" (UID: "d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce"). InnerVolumeSpecName "kube-api-access-dhd5s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:30:03 crc kubenswrapper[4822]: I1201 07:30:03.643126 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhd5s\" (UniqueName: \"kubernetes.io/projected/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-kube-api-access-dhd5s\") on node \"crc\" DevicePath \"\"" Dec 01 07:30:03 crc kubenswrapper[4822]: I1201 07:30:03.643258 4822 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:30:04 crc kubenswrapper[4822]: I1201 07:30:04.065124 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" event={"ID":"d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce","Type":"ContainerDied","Data":"05cf4af19bcebb4e1949ed32867f58b30d9c8dd452dda5d0fd7b4f816f65568e"} Dec 01 07:30:04 crc kubenswrapper[4822]: I1201 07:30:04.065405 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05cf4af19bcebb4e1949ed32867f58b30d9c8dd452dda5d0fd7b4f816f65568e" Dec 01 07:30:04 crc kubenswrapper[4822]: I1201 07:30:04.065301 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb" Dec 01 07:30:04 crc kubenswrapper[4822]: I1201 07:30:04.524250 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7"] Dec 01 07:30:04 crc kubenswrapper[4822]: I1201 07:30:04.532303 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409525-szbr7"] Dec 01 07:30:04 crc kubenswrapper[4822]: I1201 07:30:04.960535 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:30:04 crc kubenswrapper[4822]: E1201 07:30:04.961056 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:30:04 crc kubenswrapper[4822]: I1201 07:30:04.969627 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c1f05f0-aa0d-48fa-a6de-04c19783f5c2" path="/var/lib/kubelet/pods/2c1f05f0-aa0d-48fa-a6de-04c19783f5c2/volumes" Dec 01 07:30:19 crc kubenswrapper[4822]: I1201 07:30:19.950784 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:30:19 crc kubenswrapper[4822]: E1201 07:30:19.951677 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:30:32 crc kubenswrapper[4822]: I1201 07:30:32.951984 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 
07:30:32 crc kubenswrapper[4822]: E1201 07:30:32.955047 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:30:45 crc kubenswrapper[4822]: I1201 07:30:45.951194 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:30:45 crc kubenswrapper[4822]: E1201 07:30:45.952281 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:30:50 crc kubenswrapper[4822]: I1201 07:30:50.266189 4822 scope.go:117] "RemoveContainer" containerID="0919d5049df538c888b405979ba43e6793a4eeaf631c4b79997d1d08d77387bb" Dec 01 07:30:58 crc kubenswrapper[4822]: I1201 07:30:58.951237 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:30:58 crc kubenswrapper[4822]: E1201 07:30:58.952066 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:31:10 crc kubenswrapper[4822]: I1201 07:31:10.950667 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:31:10 crc kubenswrapper[4822]: E1201 07:31:10.951375 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:31:22 crc kubenswrapper[4822]: I1201 07:31:22.951596 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:31:22 crc kubenswrapper[4822]: E1201 07:31:22.952650 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:31:36 crc kubenswrapper[4822]: I1201 07:31:36.951077 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:31:36 crc 
kubenswrapper[4822]: E1201 07:31:36.952149 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:31:51 crc kubenswrapper[4822]: I1201 07:31:51.951518 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:31:51 crc kubenswrapper[4822]: E1201 07:31:51.952831 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:32:03 crc kubenswrapper[4822]: I1201 07:32:03.950316 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:32:03 crc kubenswrapper[4822]: E1201 07:32:03.950828 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:32:14 crc kubenswrapper[4822]: I1201 07:32:14.968005 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:32:14 crc kubenswrapper[4822]: E1201 07:32:14.969393 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:32:26 crc kubenswrapper[4822]: I1201 07:32:26.865574 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sx42t"] Dec 01 07:32:26 crc kubenswrapper[4822]: E1201 07:32:26.866746 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce" containerName="collect-profiles" Dec 01 07:32:26 crc kubenswrapper[4822]: I1201 07:32:26.866769 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce" containerName="collect-profiles" Dec 01 07:32:26 crc kubenswrapper[4822]: I1201 07:32:26.867042 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce" containerName="collect-profiles" Dec 01 07:32:26 crc kubenswrapper[4822]: I1201 07:32:26.870058 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:26 crc kubenswrapper[4822]: I1201 07:32:26.881549 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sx42t"] Dec 01 07:32:26 crc kubenswrapper[4822]: I1201 07:32:26.951335 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:32:26 crc kubenswrapper[4822]: E1201 07:32:26.951732 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:32:26 crc kubenswrapper[4822]: I1201 07:32:26.965292 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/126b8fd9-d056-43d2-a857-9d69e5d8fd46-catalog-content\") pod \"redhat-marketplace-sx42t\" (UID: \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\") " pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:26 crc kubenswrapper[4822]: I1201 07:32:26.965397 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf22m\" (UniqueName: \"kubernetes.io/projected/126b8fd9-d056-43d2-a857-9d69e5d8fd46-kube-api-access-xf22m\") pod \"redhat-marketplace-sx42t\" (UID: \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\") " pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:26 crc kubenswrapper[4822]: I1201 07:32:26.965453 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/126b8fd9-d056-43d2-a857-9d69e5d8fd46-utilities\") pod \"redhat-marketplace-sx42t\" (UID: \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\") " pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:27 crc kubenswrapper[4822]: I1201 07:32:27.067470 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/126b8fd9-d056-43d2-a857-9d69e5d8fd46-catalog-content\") pod \"redhat-marketplace-sx42t\" (UID: \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\") " pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:27 crc kubenswrapper[4822]: I1201 07:32:27.067910 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf22m\" (UniqueName: \"kubernetes.io/projected/126b8fd9-d056-43d2-a857-9d69e5d8fd46-kube-api-access-xf22m\") pod \"redhat-marketplace-sx42t\" (UID: \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\") " pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:27 crc kubenswrapper[4822]: I1201 07:32:27.067939 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/126b8fd9-d056-43d2-a857-9d69e5d8fd46-utilities\") pod \"redhat-marketplace-sx42t\" (UID: \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\") " pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:27 crc kubenswrapper[4822]: I1201 07:32:27.068458 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/126b8fd9-d056-43d2-a857-9d69e5d8fd46-utilities\") pod \"redhat-marketplace-sx42t\" (UID: \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\") " pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:27 crc kubenswrapper[4822]: I1201 07:32:27.069913 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/126b8fd9-d056-43d2-a857-9d69e5d8fd46-catalog-content\") pod \"redhat-marketplace-sx42t\" (UID: \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\") " pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:27 crc kubenswrapper[4822]: I1201 07:32:27.092015 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf22m\" (UniqueName: \"kubernetes.io/projected/126b8fd9-d056-43d2-a857-9d69e5d8fd46-kube-api-access-xf22m\") pod \"redhat-marketplace-sx42t\" (UID: \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\") " pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:27 crc kubenswrapper[4822]: I1201 07:32:27.195647 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:27 crc kubenswrapper[4822]: I1201 07:32:27.460204 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sx42t"] Dec 01 07:32:27 crc kubenswrapper[4822]: I1201 07:32:27.486471 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx42t" event={"ID":"126b8fd9-d056-43d2-a857-9d69e5d8fd46","Type":"ContainerStarted","Data":"fc08bcc4469143de5d8de9811c1917bf8ff71f6fa698b7c049c8bf52c4a455fd"} Dec 01 07:32:28 crc kubenswrapper[4822]: I1201 07:32:28.502413 4822 generic.go:334] "Generic (PLEG): container finished" podID="126b8fd9-d056-43d2-a857-9d69e5d8fd46" containerID="cc277db5a53e497e9ae746751c8e15eee238a4d8045857eae0f9f354c1175060" exitCode=0 Dec 01 07:32:28 crc kubenswrapper[4822]: I1201 07:32:28.502503 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx42t" event={"ID":"126b8fd9-d056-43d2-a857-9d69e5d8fd46","Type":"ContainerDied","Data":"cc277db5a53e497e9ae746751c8e15eee238a4d8045857eae0f9f354c1175060"} Dec 01 07:32:30 crc kubenswrapper[4822]: I1201 07:32:30.524362 4822 generic.go:334] "Generic (PLEG): container finished" podID="126b8fd9-d056-43d2-a857-9d69e5d8fd46" containerID="e7a27e5fe37122ad97ce66b87e684ec37b0017fdb664ba658bd20111df494258" exitCode=0 Dec 01 07:32:30 crc kubenswrapper[4822]: I1201 07:32:30.524488 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx42t" event={"ID":"126b8fd9-d056-43d2-a857-9d69e5d8fd46","Type":"ContainerDied","Data":"e7a27e5fe37122ad97ce66b87e684ec37b0017fdb664ba658bd20111df494258"} Dec 01 07:32:31 crc kubenswrapper[4822]: I1201 07:32:31.541361 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx42t" event={"ID":"126b8fd9-d056-43d2-a857-9d69e5d8fd46","Type":"ContainerStarted","Data":"9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10"} Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.284905 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sx42t" podStartSLOduration=4.842440424 podStartE2EDuration="7.284879446s" podCreationTimestamp="2025-12-01 07:32:26 +0000 UTC" firstStartedPulling="2025-12-01 07:32:28.506040297 +0000 UTC m=+2503.826847993" 
lastFinishedPulling="2025-12-01 07:32:30.948479299 +0000 UTC m=+2506.269287015" observedRunningTime="2025-12-01 07:32:31.57914852 +0000 UTC m=+2506.899956206" watchObservedRunningTime="2025-12-01 07:32:33.284879446 +0000 UTC m=+2508.605687142" Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.289747 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vgdjq"] Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.292116 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.303016 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vgdjq"] Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.483988 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eba72c1f-8fb5-45e5-9363-cf22c664614d-catalog-content\") pod \"community-operators-vgdjq\" (UID: \"eba72c1f-8fb5-45e5-9363-cf22c664614d\") " pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.484078 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eba72c1f-8fb5-45e5-9363-cf22c664614d-utilities\") pod \"community-operators-vgdjq\" (UID: \"eba72c1f-8fb5-45e5-9363-cf22c664614d\") " pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.484101 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpzx8\" (UniqueName: \"kubernetes.io/projected/eba72c1f-8fb5-45e5-9363-cf22c664614d-kube-api-access-rpzx8\") pod \"community-operators-vgdjq\" (UID: \"eba72c1f-8fb5-45e5-9363-cf22c664614d\") " pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.585909 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eba72c1f-8fb5-45e5-9363-cf22c664614d-utilities\") pod \"community-operators-vgdjq\" (UID: \"eba72c1f-8fb5-45e5-9363-cf22c664614d\") " pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.585959 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpzx8\" (UniqueName: \"kubernetes.io/projected/eba72c1f-8fb5-45e5-9363-cf22c664614d-kube-api-access-rpzx8\") pod \"community-operators-vgdjq\" (UID: \"eba72c1f-8fb5-45e5-9363-cf22c664614d\") " pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.586032 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eba72c1f-8fb5-45e5-9363-cf22c664614d-catalog-content\") pod \"community-operators-vgdjq\" (UID: \"eba72c1f-8fb5-45e5-9363-cf22c664614d\") " pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.586411 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eba72c1f-8fb5-45e5-9363-cf22c664614d-utilities\") pod \"community-operators-vgdjq\" (UID: \"eba72c1f-8fb5-45e5-9363-cf22c664614d\") " 
pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.586598 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eba72c1f-8fb5-45e5-9363-cf22c664614d-catalog-content\") pod \"community-operators-vgdjq\" (UID: \"eba72c1f-8fb5-45e5-9363-cf22c664614d\") " pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.618350 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpzx8\" (UniqueName: \"kubernetes.io/projected/eba72c1f-8fb5-45e5-9363-cf22c664614d-kube-api-access-rpzx8\") pod \"community-operators-vgdjq\" (UID: \"eba72c1f-8fb5-45e5-9363-cf22c664614d\") " pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.640901 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:33 crc kubenswrapper[4822]: I1201 07:32:33.914866 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vgdjq"] Dec 01 07:32:34 crc kubenswrapper[4822]: I1201 07:32:34.573761 4822 generic.go:334] "Generic (PLEG): container finished" podID="eba72c1f-8fb5-45e5-9363-cf22c664614d" containerID="b6d64a667370681c681dff5ac450678c638ded2353235ac983c094937d2d66d1" exitCode=0 Dec 01 07:32:34 crc kubenswrapper[4822]: I1201 07:32:34.575013 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgdjq" event={"ID":"eba72c1f-8fb5-45e5-9363-cf22c664614d","Type":"ContainerDied","Data":"b6d64a667370681c681dff5ac450678c638ded2353235ac983c094937d2d66d1"} Dec 01 07:32:34 crc kubenswrapper[4822]: I1201 07:32:34.575237 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgdjq" event={"ID":"eba72c1f-8fb5-45e5-9363-cf22c664614d","Type":"ContainerStarted","Data":"754e3f826ab8d13feb90833cb0604812ceb3f59033a61c18807d5b432019b903"} Dec 01 07:32:37 crc kubenswrapper[4822]: I1201 07:32:37.196121 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:37 crc kubenswrapper[4822]: I1201 07:32:37.198021 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:37 crc kubenswrapper[4822]: I1201 07:32:37.255983 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:37 crc kubenswrapper[4822]: I1201 07:32:37.664372 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:38 crc kubenswrapper[4822]: I1201 07:32:38.419022 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sx42t"] Dec 01 07:32:38 crc kubenswrapper[4822]: I1201 07:32:38.616597 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgdjq" event={"ID":"eba72c1f-8fb5-45e5-9363-cf22c664614d","Type":"ContainerStarted","Data":"7f64584bb7f6d719f73999edbb3737e698eb27a2b6902748f45a3c7f91f9eebf"} Dec 01 07:32:39 crc kubenswrapper[4822]: I1201 07:32:39.662269 4822 generic.go:334] "Generic (PLEG): container finished" podID="eba72c1f-8fb5-45e5-9363-cf22c664614d" 
containerID="7f64584bb7f6d719f73999edbb3737e698eb27a2b6902748f45a3c7f91f9eebf" exitCode=0 Dec 01 07:32:39 crc kubenswrapper[4822]: I1201 07:32:39.662360 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgdjq" event={"ID":"eba72c1f-8fb5-45e5-9363-cf22c664614d","Type":"ContainerDied","Data":"7f64584bb7f6d719f73999edbb3737e698eb27a2b6902748f45a3c7f91f9eebf"} Dec 01 07:32:39 crc kubenswrapper[4822]: I1201 07:32:39.662705 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sx42t" podUID="126b8fd9-d056-43d2-a857-9d69e5d8fd46" containerName="registry-server" containerID="cri-o://9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10" gracePeriod=2 Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.131299 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.302703 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/126b8fd9-d056-43d2-a857-9d69e5d8fd46-catalog-content\") pod \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\" (UID: \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\") " Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.303248 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/126b8fd9-d056-43d2-a857-9d69e5d8fd46-utilities\") pod \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\" (UID: \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\") " Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.303715 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xf22m\" (UniqueName: \"kubernetes.io/projected/126b8fd9-d056-43d2-a857-9d69e5d8fd46-kube-api-access-xf22m\") pod \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\" (UID: \"126b8fd9-d056-43d2-a857-9d69e5d8fd46\") " Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.306995 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/126b8fd9-d056-43d2-a857-9d69e5d8fd46-utilities" (OuterVolumeSpecName: "utilities") pod "126b8fd9-d056-43d2-a857-9d69e5d8fd46" (UID: "126b8fd9-d056-43d2-a857-9d69e5d8fd46"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.312806 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/126b8fd9-d056-43d2-a857-9d69e5d8fd46-kube-api-access-xf22m" (OuterVolumeSpecName: "kube-api-access-xf22m") pod "126b8fd9-d056-43d2-a857-9d69e5d8fd46" (UID: "126b8fd9-d056-43d2-a857-9d69e5d8fd46"). InnerVolumeSpecName "kube-api-access-xf22m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.321073 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/126b8fd9-d056-43d2-a857-9d69e5d8fd46-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "126b8fd9-d056-43d2-a857-9d69e5d8fd46" (UID: "126b8fd9-d056-43d2-a857-9d69e5d8fd46"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.405361 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/126b8fd9-d056-43d2-a857-9d69e5d8fd46-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.405417 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xf22m\" (UniqueName: \"kubernetes.io/projected/126b8fd9-d056-43d2-a857-9d69e5d8fd46-kube-api-access-xf22m\") on node \"crc\" DevicePath \"\"" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.405431 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/126b8fd9-d056-43d2-a857-9d69e5d8fd46-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.675650 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgdjq" event={"ID":"eba72c1f-8fb5-45e5-9363-cf22c664614d","Type":"ContainerStarted","Data":"327b329594c468cfdadb5d0c503469ea8127b790d5194d96ea9d86fdf4bd7694"} Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.680415 4822 generic.go:334] "Generic (PLEG): container finished" podID="126b8fd9-d056-43d2-a857-9d69e5d8fd46" containerID="9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10" exitCode=0 Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.680463 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx42t" event={"ID":"126b8fd9-d056-43d2-a857-9d69e5d8fd46","Type":"ContainerDied","Data":"9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10"} Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.680769 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx42t" event={"ID":"126b8fd9-d056-43d2-a857-9d69e5d8fd46","Type":"ContainerDied","Data":"fc08bcc4469143de5d8de9811c1917bf8ff71f6fa698b7c049c8bf52c4a455fd"} Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.680510 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sx42t" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.680833 4822 scope.go:117] "RemoveContainer" containerID="9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.701521 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vgdjq" podStartSLOduration=1.988229005 podStartE2EDuration="7.701502359s" podCreationTimestamp="2025-12-01 07:32:33 +0000 UTC" firstStartedPulling="2025-12-01 07:32:34.576163436 +0000 UTC m=+2509.896971152" lastFinishedPulling="2025-12-01 07:32:40.2894368 +0000 UTC m=+2515.610244506" observedRunningTime="2025-12-01 07:32:40.695060809 +0000 UTC m=+2516.015868505" watchObservedRunningTime="2025-12-01 07:32:40.701502359 +0000 UTC m=+2516.022310065" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.711223 4822 scope.go:117] "RemoveContainer" containerID="e7a27e5fe37122ad97ce66b87e684ec37b0017fdb664ba658bd20111df494258" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.729084 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sx42t"] Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.731084 4822 scope.go:117] "RemoveContainer" containerID="cc277db5a53e497e9ae746751c8e15eee238a4d8045857eae0f9f354c1175060" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.733828 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sx42t"] Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.754802 4822 scope.go:117] "RemoveContainer" containerID="9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10" Dec 01 07:32:40 crc kubenswrapper[4822]: E1201 07:32:40.755285 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10\": container with ID starting with 9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10 not found: ID does not exist" containerID="9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.755338 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10"} err="failed to get container status \"9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10\": rpc error: code = NotFound desc = could not find container \"9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10\": container with ID starting with 9cb806ae638bb37a5ff11aad6bccf9edb388ab5f6b3a73233fa6785f2c3f8e10 not found: ID does not exist" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.755358 4822 scope.go:117] "RemoveContainer" containerID="e7a27e5fe37122ad97ce66b87e684ec37b0017fdb664ba658bd20111df494258" Dec 01 07:32:40 crc kubenswrapper[4822]: E1201 07:32:40.755684 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7a27e5fe37122ad97ce66b87e684ec37b0017fdb664ba658bd20111df494258\": container with ID starting with e7a27e5fe37122ad97ce66b87e684ec37b0017fdb664ba658bd20111df494258 not found: ID does not exist" containerID="e7a27e5fe37122ad97ce66b87e684ec37b0017fdb664ba658bd20111df494258" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.755739 4822 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7a27e5fe37122ad97ce66b87e684ec37b0017fdb664ba658bd20111df494258"} err="failed to get container status \"e7a27e5fe37122ad97ce66b87e684ec37b0017fdb664ba658bd20111df494258\": rpc error: code = NotFound desc = could not find container \"e7a27e5fe37122ad97ce66b87e684ec37b0017fdb664ba658bd20111df494258\": container with ID starting with e7a27e5fe37122ad97ce66b87e684ec37b0017fdb664ba658bd20111df494258 not found: ID does not exist" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.755779 4822 scope.go:117] "RemoveContainer" containerID="cc277db5a53e497e9ae746751c8e15eee238a4d8045857eae0f9f354c1175060" Dec 01 07:32:40 crc kubenswrapper[4822]: E1201 07:32:40.756072 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc277db5a53e497e9ae746751c8e15eee238a4d8045857eae0f9f354c1175060\": container with ID starting with cc277db5a53e497e9ae746751c8e15eee238a4d8045857eae0f9f354c1175060 not found: ID does not exist" containerID="cc277db5a53e497e9ae746751c8e15eee238a4d8045857eae0f9f354c1175060" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.756105 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc277db5a53e497e9ae746751c8e15eee238a4d8045857eae0f9f354c1175060"} err="failed to get container status \"cc277db5a53e497e9ae746751c8e15eee238a4d8045857eae0f9f354c1175060\": rpc error: code = NotFound desc = could not find container \"cc277db5a53e497e9ae746751c8e15eee238a4d8045857eae0f9f354c1175060\": container with ID starting with cc277db5a53e497e9ae746751c8e15eee238a4d8045857eae0f9f354c1175060 not found: ID does not exist" Dec 01 07:32:40 crc kubenswrapper[4822]: I1201 07:32:40.960351 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="126b8fd9-d056-43d2-a857-9d69e5d8fd46" path="/var/lib/kubelet/pods/126b8fd9-d056-43d2-a857-9d69e5d8fd46/volumes" Dec 01 07:32:41 crc kubenswrapper[4822]: I1201 07:32:41.951342 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:32:41 crc kubenswrapper[4822]: E1201 07:32:41.952378 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:32:43 crc kubenswrapper[4822]: I1201 07:32:43.641163 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:43 crc kubenswrapper[4822]: I1201 07:32:43.641262 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:43 crc kubenswrapper[4822]: I1201 07:32:43.714482 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:53 crc kubenswrapper[4822]: I1201 07:32:53.712388 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vgdjq" Dec 01 07:32:54 crc kubenswrapper[4822]: I1201 07:32:54.809813 4822 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-marketplace/community-operators-vgdjq"] Dec 01 07:32:54 crc kubenswrapper[4822]: I1201 07:32:54.865943 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sbxr5"] Dec 01 07:32:54 crc kubenswrapper[4822]: I1201 07:32:54.866232 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sbxr5" podUID="ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" containerName="registry-server" containerID="cri-o://b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382" gracePeriod=2 Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.257837 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sbxr5" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.294200 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-utilities\") pod \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\" (UID: \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\") " Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.294249 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4ppw\" (UniqueName: \"kubernetes.io/projected/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-kube-api-access-w4ppw\") pod \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\" (UID: \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\") " Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.294316 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-catalog-content\") pod \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\" (UID: \"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d\") " Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.296036 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-utilities" (OuterVolumeSpecName: "utilities") pod "ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" (UID: "ff4c68cf-fcf3-4b37-b07f-61a8e226c28d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.300336 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-kube-api-access-w4ppw" (OuterVolumeSpecName: "kube-api-access-w4ppw") pod "ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" (UID: "ff4c68cf-fcf3-4b37-b07f-61a8e226c28d"). InnerVolumeSpecName "kube-api-access-w4ppw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.365126 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" (UID: "ff4c68cf-fcf3-4b37-b07f-61a8e226c28d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.395580 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4ppw\" (UniqueName: \"kubernetes.io/projected/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-kube-api-access-w4ppw\") on node \"crc\" DevicePath \"\"" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.395628 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.395640 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.861126 4822 generic.go:334] "Generic (PLEG): container finished" podID="ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" containerID="b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382" exitCode=0 Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.861169 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbxr5" event={"ID":"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d","Type":"ContainerDied","Data":"b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382"} Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.861194 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sbxr5" event={"ID":"ff4c68cf-fcf3-4b37-b07f-61a8e226c28d","Type":"ContainerDied","Data":"568ce3e050c4f53b7f57731650975cc2b82893ec4f59e55b75c10cc67cdff560"} Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.861211 4822 scope.go:117] "RemoveContainer" containerID="b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.861320 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sbxr5" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.884464 4822 scope.go:117] "RemoveContainer" containerID="cb663cff8e90f0dbd94124a055f45dad3ec86ce6bc97e615cd7ccce64da7ba38" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.909640 4822 scope.go:117] "RemoveContainer" containerID="5cb0fbcec35bd1b65b2944b379e64d27f92098ba597585aeb1febcbbffdfdbe7" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.914638 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sbxr5"] Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.922542 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-sbxr5"] Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.937295 4822 scope.go:117] "RemoveContainer" containerID="b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382" Dec 01 07:32:55 crc kubenswrapper[4822]: E1201 07:32:55.937831 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382\": container with ID starting with b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382 not found: ID does not exist" containerID="b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.937879 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382"} err="failed to get container status \"b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382\": rpc error: code = NotFound desc = could not find container \"b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382\": container with ID starting with b0104564129c378fee5e742a02378aea3eeac08dd126b40958151fbb1d7ee382 not found: ID does not exist" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.937917 4822 scope.go:117] "RemoveContainer" containerID="cb663cff8e90f0dbd94124a055f45dad3ec86ce6bc97e615cd7ccce64da7ba38" Dec 01 07:32:55 crc kubenswrapper[4822]: E1201 07:32:55.938199 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb663cff8e90f0dbd94124a055f45dad3ec86ce6bc97e615cd7ccce64da7ba38\": container with ID starting with cb663cff8e90f0dbd94124a055f45dad3ec86ce6bc97e615cd7ccce64da7ba38 not found: ID does not exist" containerID="cb663cff8e90f0dbd94124a055f45dad3ec86ce6bc97e615cd7ccce64da7ba38" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.938227 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb663cff8e90f0dbd94124a055f45dad3ec86ce6bc97e615cd7ccce64da7ba38"} err="failed to get container status \"cb663cff8e90f0dbd94124a055f45dad3ec86ce6bc97e615cd7ccce64da7ba38\": rpc error: code = NotFound desc = could not find container \"cb663cff8e90f0dbd94124a055f45dad3ec86ce6bc97e615cd7ccce64da7ba38\": container with ID starting with cb663cff8e90f0dbd94124a055f45dad3ec86ce6bc97e615cd7ccce64da7ba38 not found: ID does not exist" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.938240 4822 scope.go:117] "RemoveContainer" containerID="5cb0fbcec35bd1b65b2944b379e64d27f92098ba597585aeb1febcbbffdfdbe7" Dec 01 07:32:55 crc kubenswrapper[4822]: E1201 07:32:55.938471 4822 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5cb0fbcec35bd1b65b2944b379e64d27f92098ba597585aeb1febcbbffdfdbe7\": container with ID starting with 5cb0fbcec35bd1b65b2944b379e64d27f92098ba597585aeb1febcbbffdfdbe7 not found: ID does not exist" containerID="5cb0fbcec35bd1b65b2944b379e64d27f92098ba597585aeb1febcbbffdfdbe7" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.938501 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cb0fbcec35bd1b65b2944b379e64d27f92098ba597585aeb1febcbbffdfdbe7"} err="failed to get container status \"5cb0fbcec35bd1b65b2944b379e64d27f92098ba597585aeb1febcbbffdfdbe7\": rpc error: code = NotFound desc = could not find container \"5cb0fbcec35bd1b65b2944b379e64d27f92098ba597585aeb1febcbbffdfdbe7\": container with ID starting with 5cb0fbcec35bd1b65b2944b379e64d27f92098ba597585aeb1febcbbffdfdbe7 not found: ID does not exist" Dec 01 07:32:55 crc kubenswrapper[4822]: I1201 07:32:55.951272 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:32:56 crc kubenswrapper[4822]: I1201 07:32:56.873668 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"4dbb2d0d89f5a0101cf6c12a3c48d2d5bed21a8b4fb1033b26bfb2217392e13f"} Dec 01 07:32:56 crc kubenswrapper[4822]: I1201 07:32:56.963132 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" path="/var/lib/kubelet/pods/ff4c68cf-fcf3-4b37-b07f-61a8e226c28d/volumes" Dec 01 07:35:12 crc kubenswrapper[4822]: I1201 07:35:12.543332 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:35:12 crc kubenswrapper[4822]: I1201 07:35:12.544158 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.917802 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mgzrx"] Dec 01 07:35:24 crc kubenswrapper[4822]: E1201 07:35:24.918767 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" containerName="extract-utilities" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.918790 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" containerName="extract-utilities" Dec 01 07:35:24 crc kubenswrapper[4822]: E1201 07:35:24.918811 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" containerName="extract-content" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.918823 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" containerName="extract-content" Dec 01 07:35:24 crc kubenswrapper[4822]: E1201 07:35:24.918840 4822 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="126b8fd9-d056-43d2-a857-9d69e5d8fd46" containerName="extract-utilities" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.918853 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="126b8fd9-d056-43d2-a857-9d69e5d8fd46" containerName="extract-utilities" Dec 01 07:35:24 crc kubenswrapper[4822]: E1201 07:35:24.918869 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="126b8fd9-d056-43d2-a857-9d69e5d8fd46" containerName="registry-server" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.918881 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="126b8fd9-d056-43d2-a857-9d69e5d8fd46" containerName="registry-server" Dec 01 07:35:24 crc kubenswrapper[4822]: E1201 07:35:24.918903 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="126b8fd9-d056-43d2-a857-9d69e5d8fd46" containerName="extract-content" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.918915 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="126b8fd9-d056-43d2-a857-9d69e5d8fd46" containerName="extract-content" Dec 01 07:35:24 crc kubenswrapper[4822]: E1201 07:35:24.918951 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" containerName="registry-server" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.918963 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" containerName="registry-server" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.919225 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff4c68cf-fcf3-4b37-b07f-61a8e226c28d" containerName="registry-server" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.919258 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="126b8fd9-d056-43d2-a857-9d69e5d8fd46" containerName="registry-server" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.921006 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.926808 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j7qw\" (UniqueName: \"kubernetes.io/projected/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-kube-api-access-8j7qw\") pod \"redhat-operators-mgzrx\" (UID: \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\") " pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.926894 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-utilities\") pod \"redhat-operators-mgzrx\" (UID: \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\") " pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.926921 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-catalog-content\") pod \"redhat-operators-mgzrx\" (UID: \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\") " pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:24 crc kubenswrapper[4822]: I1201 07:35:24.940989 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mgzrx"] Dec 01 07:35:25 crc kubenswrapper[4822]: I1201 07:35:25.027970 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j7qw\" (UniqueName: \"kubernetes.io/projected/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-kube-api-access-8j7qw\") pod \"redhat-operators-mgzrx\" (UID: \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\") " pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:25 crc kubenswrapper[4822]: I1201 07:35:25.028043 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-utilities\") pod \"redhat-operators-mgzrx\" (UID: \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\") " pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:25 crc kubenswrapper[4822]: I1201 07:35:25.028232 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-catalog-content\") pod \"redhat-operators-mgzrx\" (UID: \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\") " pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:25 crc kubenswrapper[4822]: I1201 07:35:25.028700 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-utilities\") pod \"redhat-operators-mgzrx\" (UID: \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\") " pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:25 crc kubenswrapper[4822]: I1201 07:35:25.028910 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-catalog-content\") pod \"redhat-operators-mgzrx\" (UID: \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\") " pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:25 crc kubenswrapper[4822]: I1201 07:35:25.055789 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-8j7qw\" (UniqueName: \"kubernetes.io/projected/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-kube-api-access-8j7qw\") pod \"redhat-operators-mgzrx\" (UID: \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\") " pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:25 crc kubenswrapper[4822]: I1201 07:35:25.245212 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:25 crc kubenswrapper[4822]: I1201 07:35:25.676253 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mgzrx"] Dec 01 07:35:26 crc kubenswrapper[4822]: I1201 07:35:26.236695 4822 generic.go:334] "Generic (PLEG): container finished" podID="9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" containerID="4aebc26a5d41aed8cfd03bcc74a4ff03a63e990a2f8d63ba8f022672097e3c29" exitCode=0 Dec 01 07:35:26 crc kubenswrapper[4822]: I1201 07:35:26.236820 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mgzrx" event={"ID":"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5","Type":"ContainerDied","Data":"4aebc26a5d41aed8cfd03bcc74a4ff03a63e990a2f8d63ba8f022672097e3c29"} Dec 01 07:35:26 crc kubenswrapper[4822]: I1201 07:35:26.237187 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mgzrx" event={"ID":"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5","Type":"ContainerStarted","Data":"e84da4ba883c51d6002aa3e575a1509a89a28a01827373a116aa3db0bae725ab"} Dec 01 07:35:26 crc kubenswrapper[4822]: I1201 07:35:26.240894 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:35:27 crc kubenswrapper[4822]: I1201 07:35:27.248103 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mgzrx" event={"ID":"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5","Type":"ContainerStarted","Data":"c02e324c55e65393ac47118e45a72258e9885e9b320309bb92806e3e987ffb7b"} Dec 01 07:35:28 crc kubenswrapper[4822]: I1201 07:35:28.261478 4822 generic.go:334] "Generic (PLEG): container finished" podID="9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" containerID="c02e324c55e65393ac47118e45a72258e9885e9b320309bb92806e3e987ffb7b" exitCode=0 Dec 01 07:35:28 crc kubenswrapper[4822]: I1201 07:35:28.261580 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mgzrx" event={"ID":"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5","Type":"ContainerDied","Data":"c02e324c55e65393ac47118e45a72258e9885e9b320309bb92806e3e987ffb7b"} Dec 01 07:35:29 crc kubenswrapper[4822]: I1201 07:35:29.272328 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mgzrx" event={"ID":"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5","Type":"ContainerStarted","Data":"2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac"} Dec 01 07:35:29 crc kubenswrapper[4822]: I1201 07:35:29.302912 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mgzrx" podStartSLOduration=2.855550189 podStartE2EDuration="5.302882293s" podCreationTimestamp="2025-12-01 07:35:24 +0000 UTC" firstStartedPulling="2025-12-01 07:35:26.240640414 +0000 UTC m=+2681.561448110" lastFinishedPulling="2025-12-01 07:35:28.687972498 +0000 UTC m=+2684.008780214" observedRunningTime="2025-12-01 07:35:29.293673814 +0000 UTC m=+2684.614481500" watchObservedRunningTime="2025-12-01 07:35:29.302882293 +0000 UTC m=+2684.623690009" Dec 01 07:35:35 crc 
kubenswrapper[4822]: I1201 07:35:35.245952 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:35 crc kubenswrapper[4822]: I1201 07:35:35.246611 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:35 crc kubenswrapper[4822]: I1201 07:35:35.310253 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:35 crc kubenswrapper[4822]: I1201 07:35:35.398467 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:38 crc kubenswrapper[4822]: I1201 07:35:38.908719 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mgzrx"] Dec 01 07:35:38 crc kubenswrapper[4822]: I1201 07:35:38.911270 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mgzrx" podUID="9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" containerName="registry-server" containerID="cri-o://2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac" gracePeriod=2 Dec 01 07:35:39 crc kubenswrapper[4822]: I1201 07:35:39.906865 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.050703 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8j7qw\" (UniqueName: \"kubernetes.io/projected/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-kube-api-access-8j7qw\") pod \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\" (UID: \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\") " Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.050795 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-catalog-content\") pod \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\" (UID: \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\") " Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.050827 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-utilities\") pod \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\" (UID: \"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5\") " Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.052000 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-utilities" (OuterVolumeSpecName: "utilities") pod "9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" (UID: "9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.057967 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-kube-api-access-8j7qw" (OuterVolumeSpecName: "kube-api-access-8j7qw") pod "9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" (UID: "9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5"). InnerVolumeSpecName "kube-api-access-8j7qw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.153449 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8j7qw\" (UniqueName: \"kubernetes.io/projected/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-kube-api-access-8j7qw\") on node \"crc\" DevicePath \"\"" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.153484 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.169643 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" (UID: "9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.254821 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.382470 4822 generic.go:334] "Generic (PLEG): container finished" podID="9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" containerID="2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac" exitCode=0 Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.382540 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mgzrx" event={"ID":"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5","Type":"ContainerDied","Data":"2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac"} Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.382590 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mgzrx" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.382637 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mgzrx" event={"ID":"9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5","Type":"ContainerDied","Data":"e84da4ba883c51d6002aa3e575a1509a89a28a01827373a116aa3db0bae725ab"} Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.382675 4822 scope.go:117] "RemoveContainer" containerID="2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.432473 4822 scope.go:117] "RemoveContainer" containerID="c02e324c55e65393ac47118e45a72258e9885e9b320309bb92806e3e987ffb7b" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.516353 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mgzrx"] Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.521663 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mgzrx"] Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.524495 4822 scope.go:117] "RemoveContainer" containerID="4aebc26a5d41aed8cfd03bcc74a4ff03a63e990a2f8d63ba8f022672097e3c29" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.544131 4822 scope.go:117] "RemoveContainer" containerID="2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac" Dec 01 07:35:40 crc kubenswrapper[4822]: E1201 07:35:40.544593 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac\": container with ID starting with 2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac not found: ID does not exist" containerID="2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.544647 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac"} err="failed to get container status \"2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac\": rpc error: code = NotFound desc = could not find container \"2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac\": container with ID starting with 2d7f92c0d8647f29c0bed359a3eed1d2f5e3f7bdc38d207bc7522e1e986c53ac not found: ID does not exist" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.544678 4822 scope.go:117] "RemoveContainer" containerID="c02e324c55e65393ac47118e45a72258e9885e9b320309bb92806e3e987ffb7b" Dec 01 07:35:40 crc kubenswrapper[4822]: E1201 07:35:40.545014 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c02e324c55e65393ac47118e45a72258e9885e9b320309bb92806e3e987ffb7b\": container with ID starting with c02e324c55e65393ac47118e45a72258e9885e9b320309bb92806e3e987ffb7b not found: ID does not exist" containerID="c02e324c55e65393ac47118e45a72258e9885e9b320309bb92806e3e987ffb7b" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.545041 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c02e324c55e65393ac47118e45a72258e9885e9b320309bb92806e3e987ffb7b"} err="failed to get container status \"c02e324c55e65393ac47118e45a72258e9885e9b320309bb92806e3e987ffb7b\": rpc error: code = NotFound desc = could not find container 
\"c02e324c55e65393ac47118e45a72258e9885e9b320309bb92806e3e987ffb7b\": container with ID starting with c02e324c55e65393ac47118e45a72258e9885e9b320309bb92806e3e987ffb7b not found: ID does not exist" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.545054 4822 scope.go:117] "RemoveContainer" containerID="4aebc26a5d41aed8cfd03bcc74a4ff03a63e990a2f8d63ba8f022672097e3c29" Dec 01 07:35:40 crc kubenswrapper[4822]: E1201 07:35:40.545311 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4aebc26a5d41aed8cfd03bcc74a4ff03a63e990a2f8d63ba8f022672097e3c29\": container with ID starting with 4aebc26a5d41aed8cfd03bcc74a4ff03a63e990a2f8d63ba8f022672097e3c29 not found: ID does not exist" containerID="4aebc26a5d41aed8cfd03bcc74a4ff03a63e990a2f8d63ba8f022672097e3c29" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.545344 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aebc26a5d41aed8cfd03bcc74a4ff03a63e990a2f8d63ba8f022672097e3c29"} err="failed to get container status \"4aebc26a5d41aed8cfd03bcc74a4ff03a63e990a2f8d63ba8f022672097e3c29\": rpc error: code = NotFound desc = could not find container \"4aebc26a5d41aed8cfd03bcc74a4ff03a63e990a2f8d63ba8f022672097e3c29\": container with ID starting with 4aebc26a5d41aed8cfd03bcc74a4ff03a63e990a2f8d63ba8f022672097e3c29 not found: ID does not exist" Dec 01 07:35:40 crc kubenswrapper[4822]: I1201 07:35:40.963668 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" path="/var/lib/kubelet/pods/9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5/volumes" Dec 01 07:35:42 crc kubenswrapper[4822]: I1201 07:35:42.543119 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:35:42 crc kubenswrapper[4822]: I1201 07:35:42.543195 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:36:12 crc kubenswrapper[4822]: I1201 07:36:12.543096 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:36:12 crc kubenswrapper[4822]: I1201 07:36:12.543861 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:36:12 crc kubenswrapper[4822]: I1201 07:36:12.543921 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 07:36:12 crc kubenswrapper[4822]: I1201 07:36:12.544929 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"4dbb2d0d89f5a0101cf6c12a3c48d2d5bed21a8b4fb1033b26bfb2217392e13f"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:36:12 crc kubenswrapper[4822]: I1201 07:36:12.545030 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://4dbb2d0d89f5a0101cf6c12a3c48d2d5bed21a8b4fb1033b26bfb2217392e13f" gracePeriod=600 Dec 01 07:36:12 crc kubenswrapper[4822]: I1201 07:36:12.694953 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="4dbb2d0d89f5a0101cf6c12a3c48d2d5bed21a8b4fb1033b26bfb2217392e13f" exitCode=0 Dec 01 07:36:12 crc kubenswrapper[4822]: I1201 07:36:12.695039 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"4dbb2d0d89f5a0101cf6c12a3c48d2d5bed21a8b4fb1033b26bfb2217392e13f"} Dec 01 07:36:12 crc kubenswrapper[4822]: I1201 07:36:12.695591 4822 scope.go:117] "RemoveContainer" containerID="879febeda776938d1780a268e5c84163b3b94de1580a13353470c3b5b038eb79" Dec 01 07:36:13 crc kubenswrapper[4822]: I1201 07:36:13.708679 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919"} Dec 01 07:38:12 crc kubenswrapper[4822]: I1201 07:38:12.543685 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:38:12 crc kubenswrapper[4822]: I1201 07:38:12.544429 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:38:42 crc kubenswrapper[4822]: I1201 07:38:42.660234 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:38:42 crc kubenswrapper[4822]: I1201 07:38:42.661267 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:39:12 crc kubenswrapper[4822]: I1201 07:39:12.543058 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:39:12 crc kubenswrapper[4822]: I1201 07:39:12.543809 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:39:12 crc kubenswrapper[4822]: I1201 07:39:12.543866 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 07:39:12 crc kubenswrapper[4822]: I1201 07:39:12.544682 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:39:12 crc kubenswrapper[4822]: I1201 07:39:12.544782 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" gracePeriod=600 Dec 01 07:39:12 crc kubenswrapper[4822]: E1201 07:39:12.694176 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:39:12 crc kubenswrapper[4822]: I1201 07:39:12.948087 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" exitCode=0 Dec 01 07:39:12 crc kubenswrapper[4822]: I1201 07:39:12.948156 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919"} Dec 01 07:39:12 crc kubenswrapper[4822]: I1201 07:39:12.948243 4822 scope.go:117] "RemoveContainer" containerID="4dbb2d0d89f5a0101cf6c12a3c48d2d5bed21a8b4fb1033b26bfb2217392e13f" Dec 01 07:39:12 crc kubenswrapper[4822]: I1201 07:39:12.948800 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:39:12 crc kubenswrapper[4822]: E1201 07:39:12.949135 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:39:25 crc kubenswrapper[4822]: I1201 07:39:25.951458 4822 scope.go:117] "RemoveContainer" 
containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:39:25 crc kubenswrapper[4822]: E1201 07:39:25.952293 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:39:36 crc kubenswrapper[4822]: I1201 07:39:36.951235 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:39:36 crc kubenswrapper[4822]: E1201 07:39:36.952059 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:39:47 crc kubenswrapper[4822]: I1201 07:39:47.951016 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:39:47 crc kubenswrapper[4822]: E1201 07:39:47.951782 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:40:02 crc kubenswrapper[4822]: I1201 07:40:02.951439 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:40:02 crc kubenswrapper[4822]: E1201 07:40:02.952386 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:40:13 crc kubenswrapper[4822]: I1201 07:40:13.951978 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:40:13 crc kubenswrapper[4822]: E1201 07:40:13.952853 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:40:24 crc kubenswrapper[4822]: I1201 07:40:24.957858 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:40:24 crc kubenswrapper[4822]: E1201 07:40:24.958832 4822 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:40:37 crc kubenswrapper[4822]: I1201 07:40:37.951729 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:40:37 crc kubenswrapper[4822]: E1201 07:40:37.952927 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:40:52 crc kubenswrapper[4822]: I1201 07:40:52.951347 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:40:52 crc kubenswrapper[4822]: E1201 07:40:52.952456 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:41:04 crc kubenswrapper[4822]: I1201 07:41:04.960585 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:41:04 crc kubenswrapper[4822]: E1201 07:41:04.961709 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:41:19 crc kubenswrapper[4822]: I1201 07:41:19.952162 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:41:19 crc kubenswrapper[4822]: E1201 07:41:19.953157 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:41:30 crc kubenswrapper[4822]: I1201 07:41:30.951141 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:41:30 crc kubenswrapper[4822]: E1201 07:41:30.951912 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:41:44 crc kubenswrapper[4822]: I1201 07:41:44.959510 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:41:44 crc kubenswrapper[4822]: E1201 07:41:44.960676 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:41:58 crc kubenswrapper[4822]: I1201 07:41:58.951264 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:41:58 crc kubenswrapper[4822]: E1201 07:41:58.952085 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:42:12 crc kubenswrapper[4822]: I1201 07:42:12.951157 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:42:12 crc kubenswrapper[4822]: E1201 07:42:12.952731 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:42:25 crc kubenswrapper[4822]: I1201 07:42:25.950624 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:42:25 crc kubenswrapper[4822]: E1201 07:42:25.951553 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:42:36 crc kubenswrapper[4822]: I1201 07:42:36.951340 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:42:36 crc kubenswrapper[4822]: E1201 07:42:36.952192 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" 
podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:42:47 crc kubenswrapper[4822]: I1201 07:42:47.950262 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:42:47 crc kubenswrapper[4822]: E1201 07:42:47.950901 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:43:01 crc kubenswrapper[4822]: I1201 07:43:01.950714 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:43:01 crc kubenswrapper[4822]: E1201 07:43:01.951324 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:43:16 crc kubenswrapper[4822]: I1201 07:43:16.951116 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:43:16 crc kubenswrapper[4822]: E1201 07:43:16.951839 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.062314 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cj5lm"] Dec 01 07:43:26 crc kubenswrapper[4822]: E1201 07:43:26.063314 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" containerName="registry-server" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.063334 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" containerName="registry-server" Dec 01 07:43:26 crc kubenswrapper[4822]: E1201 07:43:26.063362 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" containerName="extract-utilities" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.063370 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" containerName="extract-utilities" Dec 01 07:43:26 crc kubenswrapper[4822]: E1201 07:43:26.063387 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" containerName="extract-content" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.063399 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" containerName="extract-content" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.063603 4822 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="9b2062c2-c1c0-463f-a1c6-04cfd10fa5d5" containerName="registry-server" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.064867 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.077445 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cj5lm"] Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.150382 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/307074b3-74b7-4c4e-bb90-e91edb88859f-utilities\") pod \"community-operators-cj5lm\" (UID: \"307074b3-74b7-4c4e-bb90-e91edb88859f\") " pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.150435 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/307074b3-74b7-4c4e-bb90-e91edb88859f-catalog-content\") pod \"community-operators-cj5lm\" (UID: \"307074b3-74b7-4c4e-bb90-e91edb88859f\") " pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.150512 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btq7n\" (UniqueName: \"kubernetes.io/projected/307074b3-74b7-4c4e-bb90-e91edb88859f-kube-api-access-btq7n\") pod \"community-operators-cj5lm\" (UID: \"307074b3-74b7-4c4e-bb90-e91edb88859f\") " pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.251529 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/307074b3-74b7-4c4e-bb90-e91edb88859f-utilities\") pod \"community-operators-cj5lm\" (UID: \"307074b3-74b7-4c4e-bb90-e91edb88859f\") " pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.251854 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/307074b3-74b7-4c4e-bb90-e91edb88859f-catalog-content\") pod \"community-operators-cj5lm\" (UID: \"307074b3-74b7-4c4e-bb90-e91edb88859f\") " pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.251907 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btq7n\" (UniqueName: \"kubernetes.io/projected/307074b3-74b7-4c4e-bb90-e91edb88859f-kube-api-access-btq7n\") pod \"community-operators-cj5lm\" (UID: \"307074b3-74b7-4c4e-bb90-e91edb88859f\") " pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.252066 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/307074b3-74b7-4c4e-bb90-e91edb88859f-utilities\") pod \"community-operators-cj5lm\" (UID: \"307074b3-74b7-4c4e-bb90-e91edb88859f\") " pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.252397 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/307074b3-74b7-4c4e-bb90-e91edb88859f-catalog-content\") pod \"community-operators-cj5lm\" (UID: 
\"307074b3-74b7-4c4e-bb90-e91edb88859f\") " pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.273926 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btq7n\" (UniqueName: \"kubernetes.io/projected/307074b3-74b7-4c4e-bb90-e91edb88859f-kube-api-access-btq7n\") pod \"community-operators-cj5lm\" (UID: \"307074b3-74b7-4c4e-bb90-e91edb88859f\") " pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.400197 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:26 crc kubenswrapper[4822]: I1201 07:43:26.926980 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cj5lm"] Dec 01 07:43:27 crc kubenswrapper[4822]: I1201 07:43:27.447802 4822 generic.go:334] "Generic (PLEG): container finished" podID="307074b3-74b7-4c4e-bb90-e91edb88859f" containerID="59a897c81dd6c23eccfb690927a2d1a1175c3e257194260f90762d9fe81b8af8" exitCode=0 Dec 01 07:43:27 crc kubenswrapper[4822]: I1201 07:43:27.447896 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cj5lm" event={"ID":"307074b3-74b7-4c4e-bb90-e91edb88859f","Type":"ContainerDied","Data":"59a897c81dd6c23eccfb690927a2d1a1175c3e257194260f90762d9fe81b8af8"} Dec 01 07:43:27 crc kubenswrapper[4822]: I1201 07:43:27.447989 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cj5lm" event={"ID":"307074b3-74b7-4c4e-bb90-e91edb88859f","Type":"ContainerStarted","Data":"d03e1b4a54027b1d30318d60ed29dc86b7268342d897f98b1dc6923954229e4d"} Dec 01 07:43:27 crc kubenswrapper[4822]: I1201 07:43:27.453488 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:43:28 crc kubenswrapper[4822]: I1201 07:43:28.455662 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cj5lm" event={"ID":"307074b3-74b7-4c4e-bb90-e91edb88859f","Type":"ContainerStarted","Data":"7045930310b4cafdc94027bdab1ef2ccc3375d74a690893796aef6fa7ae90e7b"} Dec 01 07:43:29 crc kubenswrapper[4822]: I1201 07:43:29.465612 4822 generic.go:334] "Generic (PLEG): container finished" podID="307074b3-74b7-4c4e-bb90-e91edb88859f" containerID="7045930310b4cafdc94027bdab1ef2ccc3375d74a690893796aef6fa7ae90e7b" exitCode=0 Dec 01 07:43:29 crc kubenswrapper[4822]: I1201 07:43:29.465733 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cj5lm" event={"ID":"307074b3-74b7-4c4e-bb90-e91edb88859f","Type":"ContainerDied","Data":"7045930310b4cafdc94027bdab1ef2ccc3375d74a690893796aef6fa7ae90e7b"} Dec 01 07:43:29 crc kubenswrapper[4822]: I1201 07:43:29.951111 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:43:29 crc kubenswrapper[4822]: E1201 07:43:29.951366 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:43:30 crc kubenswrapper[4822]: I1201 
07:43:30.475772 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cj5lm" event={"ID":"307074b3-74b7-4c4e-bb90-e91edb88859f","Type":"ContainerStarted","Data":"ce99c7bd9c2351808108f187c73f6f2af3dab156951fb8eb3d6cfcd6585b0b90"} Dec 01 07:43:30 crc kubenswrapper[4822]: I1201 07:43:30.500780 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cj5lm" podStartSLOduration=1.902366986 podStartE2EDuration="4.500751168s" podCreationTimestamp="2025-12-01 07:43:26 +0000 UTC" firstStartedPulling="2025-12-01 07:43:27.452666771 +0000 UTC m=+3162.773474487" lastFinishedPulling="2025-12-01 07:43:30.051050943 +0000 UTC m=+3165.371858669" observedRunningTime="2025-12-01 07:43:30.493140932 +0000 UTC m=+3165.813948618" watchObservedRunningTime="2025-12-01 07:43:30.500751168 +0000 UTC m=+3165.821558864" Dec 01 07:43:36 crc kubenswrapper[4822]: I1201 07:43:36.401197 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:36 crc kubenswrapper[4822]: I1201 07:43:36.401835 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:36 crc kubenswrapper[4822]: I1201 07:43:36.462710 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:36 crc kubenswrapper[4822]: I1201 07:43:36.565735 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:36 crc kubenswrapper[4822]: I1201 07:43:36.698158 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cj5lm"] Dec 01 07:43:38 crc kubenswrapper[4822]: I1201 07:43:38.538008 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cj5lm" podUID="307074b3-74b7-4c4e-bb90-e91edb88859f" containerName="registry-server" containerID="cri-o://ce99c7bd9c2351808108f187c73f6f2af3dab156951fb8eb3d6cfcd6585b0b90" gracePeriod=2 Dec 01 07:43:39 crc kubenswrapper[4822]: I1201 07:43:39.546466 4822 generic.go:334] "Generic (PLEG): container finished" podID="307074b3-74b7-4c4e-bb90-e91edb88859f" containerID="ce99c7bd9c2351808108f187c73f6f2af3dab156951fb8eb3d6cfcd6585b0b90" exitCode=0 Dec 01 07:43:39 crc kubenswrapper[4822]: I1201 07:43:39.546723 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cj5lm" event={"ID":"307074b3-74b7-4c4e-bb90-e91edb88859f","Type":"ContainerDied","Data":"ce99c7bd9c2351808108f187c73f6f2af3dab156951fb8eb3d6cfcd6585b0b90"} Dec 01 07:43:39 crc kubenswrapper[4822]: I1201 07:43:39.658818 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:39 crc kubenswrapper[4822]: I1201 07:43:39.753743 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/307074b3-74b7-4c4e-bb90-e91edb88859f-utilities\") pod \"307074b3-74b7-4c4e-bb90-e91edb88859f\" (UID: \"307074b3-74b7-4c4e-bb90-e91edb88859f\") " Dec 01 07:43:39 crc kubenswrapper[4822]: I1201 07:43:39.753878 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/307074b3-74b7-4c4e-bb90-e91edb88859f-catalog-content\") pod \"307074b3-74b7-4c4e-bb90-e91edb88859f\" (UID: \"307074b3-74b7-4c4e-bb90-e91edb88859f\") " Dec 01 07:43:39 crc kubenswrapper[4822]: I1201 07:43:39.753926 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btq7n\" (UniqueName: \"kubernetes.io/projected/307074b3-74b7-4c4e-bb90-e91edb88859f-kube-api-access-btq7n\") pod \"307074b3-74b7-4c4e-bb90-e91edb88859f\" (UID: \"307074b3-74b7-4c4e-bb90-e91edb88859f\") " Dec 01 07:43:39 crc kubenswrapper[4822]: I1201 07:43:39.755018 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/307074b3-74b7-4c4e-bb90-e91edb88859f-utilities" (OuterVolumeSpecName: "utilities") pod "307074b3-74b7-4c4e-bb90-e91edb88859f" (UID: "307074b3-74b7-4c4e-bb90-e91edb88859f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:43:39 crc kubenswrapper[4822]: I1201 07:43:39.760087 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/307074b3-74b7-4c4e-bb90-e91edb88859f-kube-api-access-btq7n" (OuterVolumeSpecName: "kube-api-access-btq7n") pod "307074b3-74b7-4c4e-bb90-e91edb88859f" (UID: "307074b3-74b7-4c4e-bb90-e91edb88859f"). InnerVolumeSpecName "kube-api-access-btq7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:43:39 crc kubenswrapper[4822]: I1201 07:43:39.824564 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/307074b3-74b7-4c4e-bb90-e91edb88859f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "307074b3-74b7-4c4e-bb90-e91edb88859f" (UID: "307074b3-74b7-4c4e-bb90-e91edb88859f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:43:39 crc kubenswrapper[4822]: I1201 07:43:39.855630 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/307074b3-74b7-4c4e-bb90-e91edb88859f-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:43:39 crc kubenswrapper[4822]: I1201 07:43:39.855667 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/307074b3-74b7-4c4e-bb90-e91edb88859f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:43:39 crc kubenswrapper[4822]: I1201 07:43:39.855684 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btq7n\" (UniqueName: \"kubernetes.io/projected/307074b3-74b7-4c4e-bb90-e91edb88859f-kube-api-access-btq7n\") on node \"crc\" DevicePath \"\"" Dec 01 07:43:40 crc kubenswrapper[4822]: I1201 07:43:40.557376 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cj5lm" event={"ID":"307074b3-74b7-4c4e-bb90-e91edb88859f","Type":"ContainerDied","Data":"d03e1b4a54027b1d30318d60ed29dc86b7268342d897f98b1dc6923954229e4d"} Dec 01 07:43:40 crc kubenswrapper[4822]: I1201 07:43:40.557794 4822 scope.go:117] "RemoveContainer" containerID="ce99c7bd9c2351808108f187c73f6f2af3dab156951fb8eb3d6cfcd6585b0b90" Dec 01 07:43:40 crc kubenswrapper[4822]: I1201 07:43:40.557500 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cj5lm" Dec 01 07:43:40 crc kubenswrapper[4822]: I1201 07:43:40.599155 4822 scope.go:117] "RemoveContainer" containerID="7045930310b4cafdc94027bdab1ef2ccc3375d74a690893796aef6fa7ae90e7b" Dec 01 07:43:40 crc kubenswrapper[4822]: I1201 07:43:40.615694 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cj5lm"] Dec 01 07:43:40 crc kubenswrapper[4822]: I1201 07:43:40.623760 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cj5lm"] Dec 01 07:43:40 crc kubenswrapper[4822]: I1201 07:43:40.641440 4822 scope.go:117] "RemoveContainer" containerID="59a897c81dd6c23eccfb690927a2d1a1175c3e257194260f90762d9fe81b8af8" Dec 01 07:43:40 crc kubenswrapper[4822]: I1201 07:43:40.966308 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="307074b3-74b7-4c4e-bb90-e91edb88859f" path="/var/lib/kubelet/pods/307074b3-74b7-4c4e-bb90-e91edb88859f/volumes" Dec 01 07:43:42 crc kubenswrapper[4822]: I1201 07:43:42.950762 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:43:42 crc kubenswrapper[4822]: E1201 07:43:42.951450 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:43:56 crc kubenswrapper[4822]: I1201 07:43:56.952086 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:43:56 crc kubenswrapper[4822]: E1201 07:43:56.953156 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:44:09 crc kubenswrapper[4822]: I1201 07:44:09.951394 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:44:09 crc kubenswrapper[4822]: E1201 07:44:09.952650 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:44:24 crc kubenswrapper[4822]: I1201 07:44:24.957521 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919" Dec 01 07:44:25 crc kubenswrapper[4822]: I1201 07:44:25.995256 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"57a885c079c73341d6724e1c97563efa990b89a27b8bd077eb8da1976dad0f24"} Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.171718 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb"] Dec 01 07:45:00 crc kubenswrapper[4822]: E1201 07:45:00.172530 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="307074b3-74b7-4c4e-bb90-e91edb88859f" containerName="extract-utilities" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.172564 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="307074b3-74b7-4c4e-bb90-e91edb88859f" containerName="extract-utilities" Dec 01 07:45:00 crc kubenswrapper[4822]: E1201 07:45:00.172588 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="307074b3-74b7-4c4e-bb90-e91edb88859f" containerName="extract-content" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.172593 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="307074b3-74b7-4c4e-bb90-e91edb88859f" containerName="extract-content" Dec 01 07:45:00 crc kubenswrapper[4822]: E1201 07:45:00.172605 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="307074b3-74b7-4c4e-bb90-e91edb88859f" containerName="registry-server" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.172611 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="307074b3-74b7-4c4e-bb90-e91edb88859f" containerName="registry-server" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.172766 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="307074b3-74b7-4c4e-bb90-e91edb88859f" containerName="registry-server" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.173259 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.175409 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.175835 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.188398 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb"] Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.230753 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/105964ba-59ad-4401-a3ff-9f0ac3bb7838-secret-volume\") pod \"collect-profiles-29409585-gzhvb\" (UID: \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.230824 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/105964ba-59ad-4401-a3ff-9f0ac3bb7838-config-volume\") pod \"collect-profiles-29409585-gzhvb\" (UID: \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.230865 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktmrn\" (UniqueName: \"kubernetes.io/projected/105964ba-59ad-4401-a3ff-9f0ac3bb7838-kube-api-access-ktmrn\") pod \"collect-profiles-29409585-gzhvb\" (UID: \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.332509 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/105964ba-59ad-4401-a3ff-9f0ac3bb7838-secret-volume\") pod \"collect-profiles-29409585-gzhvb\" (UID: \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.332664 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/105964ba-59ad-4401-a3ff-9f0ac3bb7838-config-volume\") pod \"collect-profiles-29409585-gzhvb\" (UID: \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.332737 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktmrn\" (UniqueName: \"kubernetes.io/projected/105964ba-59ad-4401-a3ff-9f0ac3bb7838-kube-api-access-ktmrn\") pod \"collect-profiles-29409585-gzhvb\" (UID: \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.333926 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/105964ba-59ad-4401-a3ff-9f0ac3bb7838-config-volume\") pod 
\"collect-profiles-29409585-gzhvb\" (UID: \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.338293 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/105964ba-59ad-4401-a3ff-9f0ac3bb7838-secret-volume\") pod \"collect-profiles-29409585-gzhvb\" (UID: \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.356709 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktmrn\" (UniqueName: \"kubernetes.io/projected/105964ba-59ad-4401-a3ff-9f0ac3bb7838-kube-api-access-ktmrn\") pod \"collect-profiles-29409585-gzhvb\" (UID: \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.500730 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:00 crc kubenswrapper[4822]: I1201 07:45:00.942416 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb"] Dec 01 07:45:00 crc kubenswrapper[4822]: W1201 07:45:00.944387 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod105964ba_59ad_4401_a3ff_9f0ac3bb7838.slice/crio-41e99258ac469ab6b62615314d5a1906d646981a4f5664fee02e7f427a02b6a0 WatchSource:0}: Error finding container 41e99258ac469ab6b62615314d5a1906d646981a4f5664fee02e7f427a02b6a0: Status 404 returned error can't find the container with id 41e99258ac469ab6b62615314d5a1906d646981a4f5664fee02e7f427a02b6a0 Dec 01 07:45:01 crc kubenswrapper[4822]: I1201 07:45:01.364167 4822 generic.go:334] "Generic (PLEG): container finished" podID="105964ba-59ad-4401-a3ff-9f0ac3bb7838" containerID="7bfd6d1f7a9ab86bcf1e0a04da8dab4e66d74bb32f2b0918b0bfcb3abaf8e11e" exitCode=0 Dec 01 07:45:01 crc kubenswrapper[4822]: I1201 07:45:01.364216 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" event={"ID":"105964ba-59ad-4401-a3ff-9f0ac3bb7838","Type":"ContainerDied","Data":"7bfd6d1f7a9ab86bcf1e0a04da8dab4e66d74bb32f2b0918b0bfcb3abaf8e11e"} Dec 01 07:45:01 crc kubenswrapper[4822]: I1201 07:45:01.364245 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" event={"ID":"105964ba-59ad-4401-a3ff-9f0ac3bb7838","Type":"ContainerStarted","Data":"41e99258ac469ab6b62615314d5a1906d646981a4f5664fee02e7f427a02b6a0"} Dec 01 07:45:01 crc kubenswrapper[4822]: E1201 07:45:01.407362 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod105964ba_59ad_4401_a3ff_9f0ac3bb7838.slice/crio-conmon-7bfd6d1f7a9ab86bcf1e0a04da8dab4e66d74bb32f2b0918b0bfcb3abaf8e11e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod105964ba_59ad_4401_a3ff_9f0ac3bb7838.slice/crio-7bfd6d1f7a9ab86bcf1e0a04da8dab4e66d74bb32f2b0918b0bfcb3abaf8e11e.scope\": RecentStats: unable to find data in memory cache]" Dec 01 
07:45:02 crc kubenswrapper[4822]: I1201 07:45:02.747942 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:02 crc kubenswrapper[4822]: I1201 07:45:02.863752 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/105964ba-59ad-4401-a3ff-9f0ac3bb7838-config-volume\") pod \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\" (UID: \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\") " Dec 01 07:45:02 crc kubenswrapper[4822]: I1201 07:45:02.863794 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktmrn\" (UniqueName: \"kubernetes.io/projected/105964ba-59ad-4401-a3ff-9f0ac3bb7838-kube-api-access-ktmrn\") pod \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\" (UID: \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\") " Dec 01 07:45:02 crc kubenswrapper[4822]: I1201 07:45:02.863823 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/105964ba-59ad-4401-a3ff-9f0ac3bb7838-secret-volume\") pod \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\" (UID: \"105964ba-59ad-4401-a3ff-9f0ac3bb7838\") " Dec 01 07:45:02 crc kubenswrapper[4822]: I1201 07:45:02.865029 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/105964ba-59ad-4401-a3ff-9f0ac3bb7838-config-volume" (OuterVolumeSpecName: "config-volume") pod "105964ba-59ad-4401-a3ff-9f0ac3bb7838" (UID: "105964ba-59ad-4401-a3ff-9f0ac3bb7838"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:45:02 crc kubenswrapper[4822]: I1201 07:45:02.865266 4822 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/105964ba-59ad-4401-a3ff-9f0ac3bb7838-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:45:02 crc kubenswrapper[4822]: I1201 07:45:02.871760 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/105964ba-59ad-4401-a3ff-9f0ac3bb7838-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "105964ba-59ad-4401-a3ff-9f0ac3bb7838" (UID: "105964ba-59ad-4401-a3ff-9f0ac3bb7838"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:45:02 crc kubenswrapper[4822]: I1201 07:45:02.876870 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/105964ba-59ad-4401-a3ff-9f0ac3bb7838-kube-api-access-ktmrn" (OuterVolumeSpecName: "kube-api-access-ktmrn") pod "105964ba-59ad-4401-a3ff-9f0ac3bb7838" (UID: "105964ba-59ad-4401-a3ff-9f0ac3bb7838"). InnerVolumeSpecName "kube-api-access-ktmrn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:45:02 crc kubenswrapper[4822]: I1201 07:45:02.966230 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktmrn\" (UniqueName: \"kubernetes.io/projected/105964ba-59ad-4401-a3ff-9f0ac3bb7838-kube-api-access-ktmrn\") on node \"crc\" DevicePath \"\"" Dec 01 07:45:02 crc kubenswrapper[4822]: I1201 07:45:02.966274 4822 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/105964ba-59ad-4401-a3ff-9f0ac3bb7838-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:45:03 crc kubenswrapper[4822]: I1201 07:45:03.384700 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" event={"ID":"105964ba-59ad-4401-a3ff-9f0ac3bb7838","Type":"ContainerDied","Data":"41e99258ac469ab6b62615314d5a1906d646981a4f5664fee02e7f427a02b6a0"} Dec 01 07:45:03 crc kubenswrapper[4822]: I1201 07:45:03.384760 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41e99258ac469ab6b62615314d5a1906d646981a4f5664fee02e7f427a02b6a0" Dec 01 07:45:03 crc kubenswrapper[4822]: I1201 07:45:03.384774 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb" Dec 01 07:45:03 crc kubenswrapper[4822]: I1201 07:45:03.824942 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b"] Dec 01 07:45:03 crc kubenswrapper[4822]: I1201 07:45:03.831657 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409540-xsr8b"] Dec 01 07:45:04 crc kubenswrapper[4822]: I1201 07:45:04.971892 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8170a546-833c-4ef5-8111-ffabc9faf33a" path="/var/lib/kubelet/pods/8170a546-833c-4ef5-8111-ffabc9faf33a/volumes" Dec 01 07:45:50 crc kubenswrapper[4822]: I1201 07:45:50.687024 4822 scope.go:117] "RemoveContainer" containerID="bda96f79472d7d55f3bea2e6548d70a4afc1ebaa3a1ddd69a92cd9218e9d3a29" Dec 01 07:46:01 crc kubenswrapper[4822]: I1201 07:46:01.944700 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hdr68"] Dec 01 07:46:01 crc kubenswrapper[4822]: E1201 07:46:01.946325 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="105964ba-59ad-4401-a3ff-9f0ac3bb7838" containerName="collect-profiles" Dec 01 07:46:01 crc kubenswrapper[4822]: I1201 07:46:01.946353 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="105964ba-59ad-4401-a3ff-9f0ac3bb7838" containerName="collect-profiles" Dec 01 07:46:01 crc kubenswrapper[4822]: I1201 07:46:01.946708 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="105964ba-59ad-4401-a3ff-9f0ac3bb7838" containerName="collect-profiles" Dec 01 07:46:01 crc kubenswrapper[4822]: I1201 07:46:01.948873 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hdr68" Dec 01 07:46:01 crc kubenswrapper[4822]: I1201 07:46:01.980332 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hdr68"] Dec 01 07:46:02 crc kubenswrapper[4822]: I1201 07:46:02.135663 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5s2cs\" (UniqueName: \"kubernetes.io/projected/23808967-21cf-4131-97fb-76317914940a-kube-api-access-5s2cs\") pod \"redhat-operators-hdr68\" (UID: \"23808967-21cf-4131-97fb-76317914940a\") " pod="openshift-marketplace/redhat-operators-hdr68" Dec 01 07:46:02 crc kubenswrapper[4822]: I1201 07:46:02.136026 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23808967-21cf-4131-97fb-76317914940a-utilities\") pod \"redhat-operators-hdr68\" (UID: \"23808967-21cf-4131-97fb-76317914940a\") " pod="openshift-marketplace/redhat-operators-hdr68" Dec 01 07:46:02 crc kubenswrapper[4822]: I1201 07:46:02.136498 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23808967-21cf-4131-97fb-76317914940a-catalog-content\") pod \"redhat-operators-hdr68\" (UID: \"23808967-21cf-4131-97fb-76317914940a\") " pod="openshift-marketplace/redhat-operators-hdr68" Dec 01 07:46:02 crc kubenswrapper[4822]: I1201 07:46:02.238454 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5s2cs\" (UniqueName: \"kubernetes.io/projected/23808967-21cf-4131-97fb-76317914940a-kube-api-access-5s2cs\") pod \"redhat-operators-hdr68\" (UID: \"23808967-21cf-4131-97fb-76317914940a\") " pod="openshift-marketplace/redhat-operators-hdr68" Dec 01 07:46:02 crc kubenswrapper[4822]: I1201 07:46:02.238526 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23808967-21cf-4131-97fb-76317914940a-utilities\") pod \"redhat-operators-hdr68\" (UID: \"23808967-21cf-4131-97fb-76317914940a\") " pod="openshift-marketplace/redhat-operators-hdr68" Dec 01 07:46:02 crc kubenswrapper[4822]: I1201 07:46:02.238633 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23808967-21cf-4131-97fb-76317914940a-catalog-content\") pod \"redhat-operators-hdr68\" (UID: \"23808967-21cf-4131-97fb-76317914940a\") " pod="openshift-marketplace/redhat-operators-hdr68" Dec 01 07:46:02 crc kubenswrapper[4822]: I1201 07:46:02.239018 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23808967-21cf-4131-97fb-76317914940a-utilities\") pod \"redhat-operators-hdr68\" (UID: \"23808967-21cf-4131-97fb-76317914940a\") " pod="openshift-marketplace/redhat-operators-hdr68" Dec 01 07:46:02 crc kubenswrapper[4822]: I1201 07:46:02.239090 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23808967-21cf-4131-97fb-76317914940a-catalog-content\") pod \"redhat-operators-hdr68\" (UID: \"23808967-21cf-4131-97fb-76317914940a\") " pod="openshift-marketplace/redhat-operators-hdr68" Dec 01 07:46:02 crc kubenswrapper[4822]: I1201 07:46:02.258568 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5s2cs\" (UniqueName: \"kubernetes.io/projected/23808967-21cf-4131-97fb-76317914940a-kube-api-access-5s2cs\") pod \"redhat-operators-hdr68\" (UID: \"23808967-21cf-4131-97fb-76317914940a\") " pod="openshift-marketplace/redhat-operators-hdr68" Dec 01 07:46:02 crc kubenswrapper[4822]: I1201 07:46:02.275007 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hdr68" Dec 01 07:46:02 crc kubenswrapper[4822]: I1201 07:46:02.783322 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hdr68"] Dec 01 07:46:02 crc kubenswrapper[4822]: I1201 07:46:02.958193 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdr68" event={"ID":"23808967-21cf-4131-97fb-76317914940a","Type":"ContainerStarted","Data":"26810ea5bf8c7021cee1a3311927f18d8e1a1f265906f4162198c09471f390d5"} Dec 01 07:46:03 crc kubenswrapper[4822]: I1201 07:46:03.963567 4822 generic.go:334] "Generic (PLEG): container finished" podID="23808967-21cf-4131-97fb-76317914940a" containerID="779dd2faea6b56c48b5b39a6d37ee8f4492a0b7e43ffa5befe3d327fb6a3b97a" exitCode=0 Dec 01 07:46:03 crc kubenswrapper[4822]: I1201 07:46:03.963642 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdr68" event={"ID":"23808967-21cf-4131-97fb-76317914940a","Type":"ContainerDied","Data":"779dd2faea6b56c48b5b39a6d37ee8f4492a0b7e43ffa5befe3d327fb6a3b97a"} Dec 01 07:46:04 crc kubenswrapper[4822]: I1201 07:46:04.973683 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdr68" event={"ID":"23808967-21cf-4131-97fb-76317914940a","Type":"ContainerStarted","Data":"86451c8be887e453327f0d2dbae1f007480e7a2dd022687986c335168e87d5e1"} Dec 01 07:46:05 crc kubenswrapper[4822]: I1201 07:46:05.980927 4822 generic.go:334] "Generic (PLEG): container finished" podID="23808967-21cf-4131-97fb-76317914940a" containerID="86451c8be887e453327f0d2dbae1f007480e7a2dd022687986c335168e87d5e1" exitCode=0 Dec 01 07:46:05 crc kubenswrapper[4822]: I1201 07:46:05.981007 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdr68" event={"ID":"23808967-21cf-4131-97fb-76317914940a","Type":"ContainerDied","Data":"86451c8be887e453327f0d2dbae1f007480e7a2dd022687986c335168e87d5e1"} Dec 01 07:46:06 crc kubenswrapper[4822]: I1201 07:46:06.990995 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdr68" event={"ID":"23808967-21cf-4131-97fb-76317914940a","Type":"ContainerStarted","Data":"980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a"} Dec 01 07:46:07 crc kubenswrapper[4822]: I1201 07:46:07.018030 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hdr68" podStartSLOduration=3.540314956 podStartE2EDuration="6.018010201s" podCreationTimestamp="2025-12-01 07:46:01 +0000 UTC" firstStartedPulling="2025-12-01 07:46:03.965812447 +0000 UTC m=+3319.286620173" lastFinishedPulling="2025-12-01 07:46:06.443507682 +0000 UTC m=+3321.764315418" observedRunningTime="2025-12-01 07:46:07.008696154 +0000 UTC m=+3322.329503840" watchObservedRunningTime="2025-12-01 07:46:07.018010201 +0000 UTC m=+3322.338817887" Dec 01 07:46:12 crc kubenswrapper[4822]: I1201 07:46:12.276322 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hdr68" 
Dec 01 07:46:12 crc kubenswrapper[4822]: I1201 07:46:12.276708 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hdr68"
Dec 01 07:46:13 crc kubenswrapper[4822]: I1201 07:46:13.334349 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hdr68" podUID="23808967-21cf-4131-97fb-76317914940a" containerName="registry-server" probeResult="failure" output=<
Dec 01 07:46:13 crc kubenswrapper[4822]: timeout: failed to connect service ":50051" within 1s
Dec 01 07:46:13 crc kubenswrapper[4822]: >
Dec 01 07:46:22 crc kubenswrapper[4822]: I1201 07:46:22.345532 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hdr68"
Dec 01 07:46:22 crc kubenswrapper[4822]: I1201 07:46:22.396350 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hdr68"
Dec 01 07:46:22 crc kubenswrapper[4822]: I1201 07:46:22.583626 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hdr68"]
Dec 01 07:46:24 crc kubenswrapper[4822]: I1201 07:46:24.158931 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hdr68" podUID="23808967-21cf-4131-97fb-76317914940a" containerName="registry-server" containerID="cri-o://980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a" gracePeriod=2
Dec 01 07:46:24 crc kubenswrapper[4822]: I1201 07:46:24.599432 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hdr68"
Dec 01 07:46:24 crc kubenswrapper[4822]: I1201 07:46:24.726504 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23808967-21cf-4131-97fb-76317914940a-catalog-content\") pod \"23808967-21cf-4131-97fb-76317914940a\" (UID: \"23808967-21cf-4131-97fb-76317914940a\") "
Dec 01 07:46:24 crc kubenswrapper[4822]: I1201 07:46:24.735789 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5s2cs\" (UniqueName: \"kubernetes.io/projected/23808967-21cf-4131-97fb-76317914940a-kube-api-access-5s2cs\") pod \"23808967-21cf-4131-97fb-76317914940a\" (UID: \"23808967-21cf-4131-97fb-76317914940a\") "
Dec 01 07:46:24 crc kubenswrapper[4822]: I1201 07:46:24.735854 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23808967-21cf-4131-97fb-76317914940a-utilities\") pod \"23808967-21cf-4131-97fb-76317914940a\" (UID: \"23808967-21cf-4131-97fb-76317914940a\") "
Dec 01 07:46:24 crc kubenswrapper[4822]: I1201 07:46:24.736748 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23808967-21cf-4131-97fb-76317914940a-utilities" (OuterVolumeSpecName: "utilities") pod "23808967-21cf-4131-97fb-76317914940a" (UID: "23808967-21cf-4131-97fb-76317914940a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:46:24 crc kubenswrapper[4822]: I1201 07:46:24.741430 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23808967-21cf-4131-97fb-76317914940a-kube-api-access-5s2cs" (OuterVolumeSpecName: "kube-api-access-5s2cs") pod "23808967-21cf-4131-97fb-76317914940a" (UID: "23808967-21cf-4131-97fb-76317914940a"). InnerVolumeSpecName "kube-api-access-5s2cs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:46:24 crc kubenswrapper[4822]: I1201 07:46:24.837487 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5s2cs\" (UniqueName: \"kubernetes.io/projected/23808967-21cf-4131-97fb-76317914940a-kube-api-access-5s2cs\") on node \"crc\" DevicePath \"\""
Dec 01 07:46:24 crc kubenswrapper[4822]: I1201 07:46:24.837525 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23808967-21cf-4131-97fb-76317914940a-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 07:46:24 crc kubenswrapper[4822]: I1201 07:46:24.844515 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23808967-21cf-4131-97fb-76317914940a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "23808967-21cf-4131-97fb-76317914940a" (UID: "23808967-21cf-4131-97fb-76317914940a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:46:24 crc kubenswrapper[4822]: I1201 07:46:24.938533 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23808967-21cf-4131-97fb-76317914940a-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.172920 4822 generic.go:334] "Generic (PLEG): container finished" podID="23808967-21cf-4131-97fb-76317914940a" containerID="980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a" exitCode=0
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.173013 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdr68" event={"ID":"23808967-21cf-4131-97fb-76317914940a","Type":"ContainerDied","Data":"980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a"}
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.173053 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdr68" event={"ID":"23808967-21cf-4131-97fb-76317914940a","Type":"ContainerDied","Data":"26810ea5bf8c7021cee1a3311927f18d8e1a1f265906f4162198c09471f390d5"}
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.173060 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hdr68"
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.173082 4822 scope.go:117] "RemoveContainer" containerID="980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a"
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.201852 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hdr68"]
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.205790 4822 scope.go:117] "RemoveContainer" containerID="86451c8be887e453327f0d2dbae1f007480e7a2dd022687986c335168e87d5e1"
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.215316 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hdr68"]
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.229957 4822 scope.go:117] "RemoveContainer" containerID="779dd2faea6b56c48b5b39a6d37ee8f4492a0b7e43ffa5befe3d327fb6a3b97a"
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.265740 4822 scope.go:117] "RemoveContainer" containerID="980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a"
Dec 01 07:46:25 crc kubenswrapper[4822]: E1201 07:46:25.266236 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a\": container with ID starting with 980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a not found: ID does not exist" containerID="980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a"
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.266289 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a"} err="failed to get container status \"980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a\": rpc error: code = NotFound desc = could not find container \"980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a\": container with ID starting with 980162a7cc6d43d78c46079209fd0c49c35201e6dafc76f50e485ac4b3dab08a not found: ID does not exist"
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.266326 4822 scope.go:117] "RemoveContainer" containerID="86451c8be887e453327f0d2dbae1f007480e7a2dd022687986c335168e87d5e1"
Dec 01 07:46:25 crc kubenswrapper[4822]: E1201 07:46:25.266902 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86451c8be887e453327f0d2dbae1f007480e7a2dd022687986c335168e87d5e1\": container with ID starting with 86451c8be887e453327f0d2dbae1f007480e7a2dd022687986c335168e87d5e1 not found: ID does not exist" containerID="86451c8be887e453327f0d2dbae1f007480e7a2dd022687986c335168e87d5e1"
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.266932 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86451c8be887e453327f0d2dbae1f007480e7a2dd022687986c335168e87d5e1"} err="failed to get container status \"86451c8be887e453327f0d2dbae1f007480e7a2dd022687986c335168e87d5e1\": rpc error: code = NotFound desc = could not find container \"86451c8be887e453327f0d2dbae1f007480e7a2dd022687986c335168e87d5e1\": container with ID starting with 86451c8be887e453327f0d2dbae1f007480e7a2dd022687986c335168e87d5e1 not found: ID does not exist"
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.266951 4822 scope.go:117] "RemoveContainer" containerID="779dd2faea6b56c48b5b39a6d37ee8f4492a0b7e43ffa5befe3d327fb6a3b97a"
Dec 01 07:46:25 crc kubenswrapper[4822]: E1201 07:46:25.267220 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"779dd2faea6b56c48b5b39a6d37ee8f4492a0b7e43ffa5befe3d327fb6a3b97a\": container with ID starting with 779dd2faea6b56c48b5b39a6d37ee8f4492a0b7e43ffa5befe3d327fb6a3b97a not found: ID does not exist" containerID="779dd2faea6b56c48b5b39a6d37ee8f4492a0b7e43ffa5befe3d327fb6a3b97a"
Dec 01 07:46:25 crc kubenswrapper[4822]: I1201 07:46:25.267253 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"779dd2faea6b56c48b5b39a6d37ee8f4492a0b7e43ffa5befe3d327fb6a3b97a"} err="failed to get container status \"779dd2faea6b56c48b5b39a6d37ee8f4492a0b7e43ffa5befe3d327fb6a3b97a\": rpc error: code = NotFound desc = could not find container \"779dd2faea6b56c48b5b39a6d37ee8f4492a0b7e43ffa5befe3d327fb6a3b97a\": container with ID starting with 779dd2faea6b56c48b5b39a6d37ee8f4492a0b7e43ffa5befe3d327fb6a3b97a not found: ID does not exist"
Dec 01 07:46:26 crc kubenswrapper[4822]: I1201 07:46:26.965503 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23808967-21cf-4131-97fb-76317914940a" path="/var/lib/kubelet/pods/23808967-21cf-4131-97fb-76317914940a/volumes"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.402197 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-crksg"]
Dec 01 07:46:28 crc kubenswrapper[4822]: E1201 07:46:28.402814 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23808967-21cf-4131-97fb-76317914940a" containerName="extract-content"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.402849 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="23808967-21cf-4131-97fb-76317914940a" containerName="extract-content"
Dec 01 07:46:28 crc kubenswrapper[4822]: E1201 07:46:28.402892 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23808967-21cf-4131-97fb-76317914940a" containerName="extract-utilities"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.402909 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="23808967-21cf-4131-97fb-76317914940a" containerName="extract-utilities"
Dec 01 07:46:28 crc kubenswrapper[4822]: E1201 07:46:28.402974 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23808967-21cf-4131-97fb-76317914940a" containerName="registry-server"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.402996 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="23808967-21cf-4131-97fb-76317914940a" containerName="registry-server"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.403382 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="23808967-21cf-4131-97fb-76317914940a" containerName="registry-server"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.406124 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.419158 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-crksg"]
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.589832 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qpd55"]
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.591673 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.594246 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/506367d7-a03e-4e67-a3e3-6fe145a43af5-catalog-content\") pod \"certified-operators-crksg\" (UID: \"506367d7-a03e-4e67-a3e3-6fe145a43af5\") " pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.594332 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba1c525f-fef3-42d3-89fb-2797884cddb9-catalog-content\") pod \"redhat-marketplace-qpd55\" (UID: \"ba1c525f-fef3-42d3-89fb-2797884cddb9\") " pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.594525 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgx8p\" (UniqueName: \"kubernetes.io/projected/506367d7-a03e-4e67-a3e3-6fe145a43af5-kube-api-access-fgx8p\") pod \"certified-operators-crksg\" (UID: \"506367d7-a03e-4e67-a3e3-6fe145a43af5\") " pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.594637 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwjbq\" (UniqueName: \"kubernetes.io/projected/ba1c525f-fef3-42d3-89fb-2797884cddb9-kube-api-access-cwjbq\") pod \"redhat-marketplace-qpd55\" (UID: \"ba1c525f-fef3-42d3-89fb-2797884cddb9\") " pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.594675 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba1c525f-fef3-42d3-89fb-2797884cddb9-utilities\") pod \"redhat-marketplace-qpd55\" (UID: \"ba1c525f-fef3-42d3-89fb-2797884cddb9\") " pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.594707 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/506367d7-a03e-4e67-a3e3-6fe145a43af5-utilities\") pod \"certified-operators-crksg\" (UID: \"506367d7-a03e-4e67-a3e3-6fe145a43af5\") " pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.602646 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qpd55"]
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.695719 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba1c525f-fef3-42d3-89fb-2797884cddb9-utilities\") pod \"redhat-marketplace-qpd55\" (UID: \"ba1c525f-fef3-42d3-89fb-2797884cddb9\") " pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.695771 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/506367d7-a03e-4e67-a3e3-6fe145a43af5-utilities\") pod \"certified-operators-crksg\" (UID: \"506367d7-a03e-4e67-a3e3-6fe145a43af5\") " pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.695946 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/506367d7-a03e-4e67-a3e3-6fe145a43af5-catalog-content\") pod \"certified-operators-crksg\" (UID: \"506367d7-a03e-4e67-a3e3-6fe145a43af5\") " pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.695993 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba1c525f-fef3-42d3-89fb-2797884cddb9-catalog-content\") pod \"redhat-marketplace-qpd55\" (UID: \"ba1c525f-fef3-42d3-89fb-2797884cddb9\") " pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.696016 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgx8p\" (UniqueName: \"kubernetes.io/projected/506367d7-a03e-4e67-a3e3-6fe145a43af5-kube-api-access-fgx8p\") pod \"certified-operators-crksg\" (UID: \"506367d7-a03e-4e67-a3e3-6fe145a43af5\") " pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.696048 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwjbq\" (UniqueName: \"kubernetes.io/projected/ba1c525f-fef3-42d3-89fb-2797884cddb9-kube-api-access-cwjbq\") pod \"redhat-marketplace-qpd55\" (UID: \"ba1c525f-fef3-42d3-89fb-2797884cddb9\") " pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.696195 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/506367d7-a03e-4e67-a3e3-6fe145a43af5-utilities\") pod \"certified-operators-crksg\" (UID: \"506367d7-a03e-4e67-a3e3-6fe145a43af5\") " pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.696445 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/506367d7-a03e-4e67-a3e3-6fe145a43af5-catalog-content\") pod \"certified-operators-crksg\" (UID: \"506367d7-a03e-4e67-a3e3-6fe145a43af5\") " pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.696517 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba1c525f-fef3-42d3-89fb-2797884cddb9-utilities\") pod \"redhat-marketplace-qpd55\" (UID: \"ba1c525f-fef3-42d3-89fb-2797884cddb9\") " pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.696826 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba1c525f-fef3-42d3-89fb-2797884cddb9-catalog-content\") pod \"redhat-marketplace-qpd55\" (UID: \"ba1c525f-fef3-42d3-89fb-2797884cddb9\") " pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.720432 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgx8p\" (UniqueName: \"kubernetes.io/projected/506367d7-a03e-4e67-a3e3-6fe145a43af5-kube-api-access-fgx8p\") pod \"certified-operators-crksg\" (UID: \"506367d7-a03e-4e67-a3e3-6fe145a43af5\") " pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.725969 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwjbq\" (UniqueName: \"kubernetes.io/projected/ba1c525f-fef3-42d3-89fb-2797884cddb9-kube-api-access-cwjbq\") pod \"redhat-marketplace-qpd55\" (UID: \"ba1c525f-fef3-42d3-89fb-2797884cddb9\") " pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.729420 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:28 crc kubenswrapper[4822]: I1201 07:46:28.906618 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:29 crc kubenswrapper[4822]: I1201 07:46:29.206191 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-crksg"]
Dec 01 07:46:29 crc kubenswrapper[4822]: I1201 07:46:29.397724 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qpd55"]
Dec 01 07:46:30 crc kubenswrapper[4822]: I1201 07:46:30.231395 4822 generic.go:334] "Generic (PLEG): container finished" podID="ba1c525f-fef3-42d3-89fb-2797884cddb9" containerID="cd333181dea592a45252358bab04f9f57e4c1b8e62509361482783ff740a7165" exitCode=0
Dec 01 07:46:30 crc kubenswrapper[4822]: I1201 07:46:30.231607 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qpd55" event={"ID":"ba1c525f-fef3-42d3-89fb-2797884cddb9","Type":"ContainerDied","Data":"cd333181dea592a45252358bab04f9f57e4c1b8e62509361482783ff740a7165"}
Dec 01 07:46:30 crc kubenswrapper[4822]: I1201 07:46:30.231933 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qpd55" event={"ID":"ba1c525f-fef3-42d3-89fb-2797884cddb9","Type":"ContainerStarted","Data":"c99a6db2d01665b1e24079cb9687cb9f5c22f658913f00ee6fde78a0563e90a0"}
Dec 01 07:46:30 crc kubenswrapper[4822]: I1201 07:46:30.234918 4822 generic.go:334] "Generic (PLEG): container finished" podID="506367d7-a03e-4e67-a3e3-6fe145a43af5" containerID="bddf3e898ff122061e22898436ff2fc649c52c866e669b80fbbe69b84240602d" exitCode=0
Dec 01 07:46:30 crc kubenswrapper[4822]: I1201 07:46:30.234969 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crksg" event={"ID":"506367d7-a03e-4e67-a3e3-6fe145a43af5","Type":"ContainerDied","Data":"bddf3e898ff122061e22898436ff2fc649c52c866e669b80fbbe69b84240602d"}
Dec 01 07:46:30 crc kubenswrapper[4822]: I1201 07:46:30.235004 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crksg" event={"ID":"506367d7-a03e-4e67-a3e3-6fe145a43af5","Type":"ContainerStarted","Data":"abf3b9d742fe16b4cf9c70aa93529fe2950221c92b98dd472d9060e4367d4d01"}
Dec 01 07:46:31 crc kubenswrapper[4822]: I1201 07:46:31.244574 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qpd55" event={"ID":"ba1c525f-fef3-42d3-89fb-2797884cddb9","Type":"ContainerStarted","Data":"16063d3c8e028d33bc47791e222c33c79c7dd974a2916c0170287e02aa2c7eb8"}
Dec 01 07:46:31 crc kubenswrapper[4822]: I1201 07:46:31.247205 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crksg" event={"ID":"506367d7-a03e-4e67-a3e3-6fe145a43af5","Type":"ContainerStarted","Data":"b5963172b38b76d6860216047b5d529f4ff409e296909600e71d2fbac9a88671"}
Dec 01 07:46:32 crc kubenswrapper[4822]: I1201 07:46:32.260167 4822 generic.go:334] "Generic (PLEG): container finished" podID="ba1c525f-fef3-42d3-89fb-2797884cddb9" containerID="16063d3c8e028d33bc47791e222c33c79c7dd974a2916c0170287e02aa2c7eb8" exitCode=0
Dec 01 07:46:32 crc kubenswrapper[4822]: I1201 07:46:32.260277 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qpd55" event={"ID":"ba1c525f-fef3-42d3-89fb-2797884cddb9","Type":"ContainerDied","Data":"16063d3c8e028d33bc47791e222c33c79c7dd974a2916c0170287e02aa2c7eb8"}
Dec 01 07:46:32 crc kubenswrapper[4822]: I1201 07:46:32.263472 4822 generic.go:334] "Generic (PLEG): container finished" podID="506367d7-a03e-4e67-a3e3-6fe145a43af5" containerID="b5963172b38b76d6860216047b5d529f4ff409e296909600e71d2fbac9a88671" exitCode=0
Dec 01 07:46:32 crc kubenswrapper[4822]: I1201 07:46:32.263518 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crksg" event={"ID":"506367d7-a03e-4e67-a3e3-6fe145a43af5","Type":"ContainerDied","Data":"b5963172b38b76d6860216047b5d529f4ff409e296909600e71d2fbac9a88671"}
Dec 01 07:46:33 crc kubenswrapper[4822]: I1201 07:46:33.277804 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qpd55" event={"ID":"ba1c525f-fef3-42d3-89fb-2797884cddb9","Type":"ContainerStarted","Data":"e9ba7e6baf13d8b607c8c4caf952d889f8488ec9815e09d03881202af7cfd5e4"}
Dec 01 07:46:33 crc kubenswrapper[4822]: I1201 07:46:33.280874 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crksg" event={"ID":"506367d7-a03e-4e67-a3e3-6fe145a43af5","Type":"ContainerStarted","Data":"31537247ef16a847b52a0fbe5e281de1684ec3ddb57316d7d7a35a4e411bcdf4"}
Dec 01 07:46:33 crc kubenswrapper[4822]: I1201 07:46:33.309752 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qpd55" podStartSLOduration=2.878589897 podStartE2EDuration="5.309728706s" podCreationTimestamp="2025-12-01 07:46:28 +0000 UTC" firstStartedPulling="2025-12-01 07:46:30.234785178 +0000 UTC m=+3345.555592894" lastFinishedPulling="2025-12-01 07:46:32.665924017 +0000 UTC m=+3347.986731703" observedRunningTime="2025-12-01 07:46:33.301706606 +0000 UTC m=+3348.622514312" watchObservedRunningTime="2025-12-01 07:46:33.309728706 +0000 UTC m=+3348.630536412"
Dec 01 07:46:33 crc kubenswrapper[4822]: I1201 07:46:33.339247 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-crksg" podStartSLOduration=2.829042785 podStartE2EDuration="5.339226594s" podCreationTimestamp="2025-12-01 07:46:28 +0000 UTC" firstStartedPulling="2025-12-01 07:46:30.23763504 +0000 UTC m=+3345.558442776" lastFinishedPulling="2025-12-01 07:46:32.747818899 +0000 UTC m=+3348.068626585" observedRunningTime="2025-12-01 07:46:33.336847315 +0000 UTC m=+3348.657655011" watchObservedRunningTime="2025-12-01 07:46:33.339226594 +0000 UTC m=+3348.660034280"
Dec 01 07:46:38 crc kubenswrapper[4822]: I1201 07:46:38.730002 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:38 crc kubenswrapper[4822]: I1201 07:46:38.730829 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:38 crc kubenswrapper[4822]: I1201 07:46:38.806404 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:38 crc kubenswrapper[4822]: I1201 07:46:38.908324 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:38 crc kubenswrapper[4822]: I1201 07:46:38.908406 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:38 crc kubenswrapper[4822]: I1201 07:46:38.989480 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:39 crc kubenswrapper[4822]: I1201 07:46:39.411247 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:39 crc kubenswrapper[4822]: I1201 07:46:39.431524 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:40 crc kubenswrapper[4822]: I1201 07:46:40.458509 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-crksg"]
Dec 01 07:46:41 crc kubenswrapper[4822]: I1201 07:46:41.359910 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-crksg" podUID="506367d7-a03e-4e67-a3e3-6fe145a43af5" containerName="registry-server" containerID="cri-o://31537247ef16a847b52a0fbe5e281de1684ec3ddb57316d7d7a35a4e411bcdf4" gracePeriod=2
Dec 01 07:46:41 crc kubenswrapper[4822]: I1201 07:46:41.859206 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qpd55"]
Dec 01 07:46:41 crc kubenswrapper[4822]: I1201 07:46:41.859523 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qpd55" podUID="ba1c525f-fef3-42d3-89fb-2797884cddb9" containerName="registry-server" containerID="cri-o://e9ba7e6baf13d8b607c8c4caf952d889f8488ec9815e09d03881202af7cfd5e4" gracePeriod=2
Dec 01 07:46:42 crc kubenswrapper[4822]: I1201 07:46:42.374505 4822 generic.go:334] "Generic (PLEG): container finished" podID="506367d7-a03e-4e67-a3e3-6fe145a43af5" containerID="31537247ef16a847b52a0fbe5e281de1684ec3ddb57316d7d7a35a4e411bcdf4" exitCode=0
Dec 01 07:46:42 crc kubenswrapper[4822]: I1201 07:46:42.374639 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crksg" event={"ID":"506367d7-a03e-4e67-a3e3-6fe145a43af5","Type":"ContainerDied","Data":"31537247ef16a847b52a0fbe5e281de1684ec3ddb57316d7d7a35a4e411bcdf4"}
Dec 01 07:46:42 crc kubenswrapper[4822]: I1201 07:46:42.378981 4822 generic.go:334] "Generic (PLEG): container finished" podID="ba1c525f-fef3-42d3-89fb-2797884cddb9" containerID="e9ba7e6baf13d8b607c8c4caf952d889f8488ec9815e09d03881202af7cfd5e4" exitCode=0
Dec 01 07:46:42 crc kubenswrapper[4822]: I1201 07:46:42.379081 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qpd55" event={"ID":"ba1c525f-fef3-42d3-89fb-2797884cddb9","Type":"ContainerDied","Data":"e9ba7e6baf13d8b607c8c4caf952d889f8488ec9815e09d03881202af7cfd5e4"}
Dec 01 07:46:42 crc kubenswrapper[4822]: I1201 07:46:42.542741 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 07:46:42 crc kubenswrapper[4822]: I1201 07:46:42.542903 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 07:46:42 crc kubenswrapper[4822]: I1201 07:46:42.888852 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:42 crc kubenswrapper[4822]: I1201 07:46:42.953193 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.017722 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba1c525f-fef3-42d3-89fb-2797884cddb9-catalog-content\") pod \"ba1c525f-fef3-42d3-89fb-2797884cddb9\" (UID: \"ba1c525f-fef3-42d3-89fb-2797884cddb9\") "
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.017805 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba1c525f-fef3-42d3-89fb-2797884cddb9-utilities\") pod \"ba1c525f-fef3-42d3-89fb-2797884cddb9\" (UID: \"ba1c525f-fef3-42d3-89fb-2797884cddb9\") "
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.017877 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwjbq\" (UniqueName: \"kubernetes.io/projected/ba1c525f-fef3-42d3-89fb-2797884cddb9-kube-api-access-cwjbq\") pod \"ba1c525f-fef3-42d3-89fb-2797884cddb9\" (UID: \"ba1c525f-fef3-42d3-89fb-2797884cddb9\") "
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.019290 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba1c525f-fef3-42d3-89fb-2797884cddb9-utilities" (OuterVolumeSpecName: "utilities") pod "ba1c525f-fef3-42d3-89fb-2797884cddb9" (UID: "ba1c525f-fef3-42d3-89fb-2797884cddb9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.024813 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba1c525f-fef3-42d3-89fb-2797884cddb9-kube-api-access-cwjbq" (OuterVolumeSpecName: "kube-api-access-cwjbq") pod "ba1c525f-fef3-42d3-89fb-2797884cddb9" (UID: "ba1c525f-fef3-42d3-89fb-2797884cddb9"). InnerVolumeSpecName "kube-api-access-cwjbq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.045306 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba1c525f-fef3-42d3-89fb-2797884cddb9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba1c525f-fef3-42d3-89fb-2797884cddb9" (UID: "ba1c525f-fef3-42d3-89fb-2797884cddb9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.119494 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/506367d7-a03e-4e67-a3e3-6fe145a43af5-catalog-content\") pod \"506367d7-a03e-4e67-a3e3-6fe145a43af5\" (UID: \"506367d7-a03e-4e67-a3e3-6fe145a43af5\") "
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.119541 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgx8p\" (UniqueName: \"kubernetes.io/projected/506367d7-a03e-4e67-a3e3-6fe145a43af5-kube-api-access-fgx8p\") pod \"506367d7-a03e-4e67-a3e3-6fe145a43af5\" (UID: \"506367d7-a03e-4e67-a3e3-6fe145a43af5\") "
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.119596 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/506367d7-a03e-4e67-a3e3-6fe145a43af5-utilities\") pod \"506367d7-a03e-4e67-a3e3-6fe145a43af5\" (UID: \"506367d7-a03e-4e67-a3e3-6fe145a43af5\") "
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.119946 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba1c525f-fef3-42d3-89fb-2797884cddb9-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.119964 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwjbq\" (UniqueName: \"kubernetes.io/projected/ba1c525f-fef3-42d3-89fb-2797884cddb9-kube-api-access-cwjbq\") on node \"crc\" DevicePath \"\""
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.119975 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba1c525f-fef3-42d3-89fb-2797884cddb9-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.120352 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/506367d7-a03e-4e67-a3e3-6fe145a43af5-utilities" (OuterVolumeSpecName: "utilities") pod "506367d7-a03e-4e67-a3e3-6fe145a43af5" (UID: "506367d7-a03e-4e67-a3e3-6fe145a43af5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.122219 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/506367d7-a03e-4e67-a3e3-6fe145a43af5-kube-api-access-fgx8p" (OuterVolumeSpecName: "kube-api-access-fgx8p") pod "506367d7-a03e-4e67-a3e3-6fe145a43af5" (UID: "506367d7-a03e-4e67-a3e3-6fe145a43af5"). InnerVolumeSpecName "kube-api-access-fgx8p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.177878 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/506367d7-a03e-4e67-a3e3-6fe145a43af5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "506367d7-a03e-4e67-a3e3-6fe145a43af5" (UID: "506367d7-a03e-4e67-a3e3-6fe145a43af5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.221480 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/506367d7-a03e-4e67-a3e3-6fe145a43af5-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.221529 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgx8p\" (UniqueName: \"kubernetes.io/projected/506367d7-a03e-4e67-a3e3-6fe145a43af5-kube-api-access-fgx8p\") on node \"crc\" DevicePath \"\""
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.221588 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/506367d7-a03e-4e67-a3e3-6fe145a43af5-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.392617 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qpd55"
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.392582 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qpd55" event={"ID":"ba1c525f-fef3-42d3-89fb-2797884cddb9","Type":"ContainerDied","Data":"c99a6db2d01665b1e24079cb9687cb9f5c22f658913f00ee6fde78a0563e90a0"}
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.393110 4822 scope.go:117] "RemoveContainer" containerID="e9ba7e6baf13d8b607c8c4caf952d889f8488ec9815e09d03881202af7cfd5e4"
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.402888 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crksg" event={"ID":"506367d7-a03e-4e67-a3e3-6fe145a43af5","Type":"ContainerDied","Data":"abf3b9d742fe16b4cf9c70aa93529fe2950221c92b98dd472d9060e4367d4d01"}
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.403005 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-crksg"
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.429603 4822 scope.go:117] "RemoveContainer" containerID="16063d3c8e028d33bc47791e222c33c79c7dd974a2916c0170287e02aa2c7eb8"
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.463235 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qpd55"]
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.474240 4822 scope.go:117] "RemoveContainer" containerID="cd333181dea592a45252358bab04f9f57e4c1b8e62509361482783ff740a7165"
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.477754 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qpd55"]
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.489336 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-crksg"]
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.495828 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-crksg"]
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.508062 4822 scope.go:117] "RemoveContainer" containerID="31537247ef16a847b52a0fbe5e281de1684ec3ddb57316d7d7a35a4e411bcdf4"
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.530716 4822 scope.go:117] "RemoveContainer" containerID="b5963172b38b76d6860216047b5d529f4ff409e296909600e71d2fbac9a88671"
Dec 01 07:46:43 crc kubenswrapper[4822]: I1201 07:46:43.563031 4822 scope.go:117] "RemoveContainer" containerID="bddf3e898ff122061e22898436ff2fc649c52c866e669b80fbbe69b84240602d"
Dec 01 07:46:43 crc kubenswrapper[4822]: E1201 07:46:43.612111 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod506367d7_a03e_4e67_a3e3_6fe145a43af5.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba1c525f_fef3_42d3_89fb_2797884cddb9.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba1c525f_fef3_42d3_89fb_2797884cddb9.slice/crio-c99a6db2d01665b1e24079cb9687cb9f5c22f658913f00ee6fde78a0563e90a0\": RecentStats: unable to find data in memory cache]"
Dec 01 07:46:44 crc kubenswrapper[4822]: I1201 07:46:44.967109 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="506367d7-a03e-4e67-a3e3-6fe145a43af5" path="/var/lib/kubelet/pods/506367d7-a03e-4e67-a3e3-6fe145a43af5/volumes"
Dec 01 07:46:44 crc kubenswrapper[4822]: I1201 07:46:44.970661 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba1c525f-fef3-42d3-89fb-2797884cddb9" path="/var/lib/kubelet/pods/ba1c525f-fef3-42d3-89fb-2797884cddb9/volumes"
Dec 01 07:47:12 crc kubenswrapper[4822]: I1201 07:47:12.543461 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 07:47:12 crc kubenswrapper[4822]: I1201 07:47:12.544635 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 07:47:42 crc kubenswrapper[4822]: I1201 07:47:42.542354 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 07:47:42 crc kubenswrapper[4822]: I1201 07:47:42.543168 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 07:47:42 crc kubenswrapper[4822]: I1201 07:47:42.543237 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64"
Dec 01 07:47:42 crc kubenswrapper[4822]: I1201 07:47:42.544177 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"57a885c079c73341d6724e1c97563efa990b89a27b8bd077eb8da1976dad0f24"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 07:47:42 crc kubenswrapper[4822]: I1201 07:47:42.544263 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://57a885c079c73341d6724e1c97563efa990b89a27b8bd077eb8da1976dad0f24" gracePeriod=600
Dec 01 07:47:42 crc kubenswrapper[4822]: I1201 07:47:42.967653 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="57a885c079c73341d6724e1c97563efa990b89a27b8bd077eb8da1976dad0f24" exitCode=0
Dec 01 07:47:42 crc kubenswrapper[4822]: I1201 07:47:42.967726 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"57a885c079c73341d6724e1c97563efa990b89a27b8bd077eb8da1976dad0f24"}
Dec 01 07:47:42 crc kubenswrapper[4822]: I1201 07:47:42.968055 4822 scope.go:117] "RemoveContainer" containerID="37f5e7df40f1a33bb5602a4c7da7bf9c43cff0a0ec72f71e690579bef0acc919"
Dec 01 07:47:43 crc kubenswrapper[4822]: I1201 07:47:43.977962 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f"}
Dec 01 07:50:12 crc kubenswrapper[4822]: I1201 07:50:12.543309 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 07:50:12 crc kubenswrapper[4822]: I1201 07:50:12.544199 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 07:50:42 crc kubenswrapper[4822]: I1201 07:50:42.543031 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 07:50:42 crc kubenswrapper[4822]: I1201 07:50:42.543518 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 07:51:12 crc kubenswrapper[4822]: I1201 07:51:12.543336 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 07:51:12 crc kubenswrapper[4822]: I1201 07:51:12.543947 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 07:51:12 crc kubenswrapper[4822]: I1201 07:51:12.543992 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64"
Dec 01 07:51:12 crc kubenswrapper[4822]: I1201 07:51:12.544683 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 07:51:12 crc kubenswrapper[4822]: I1201 07:51:12.544740 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" gracePeriod=600
Dec 01 07:51:12 crc kubenswrapper[4822]: E1201 07:51:12.667596 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507"
Dec 01 07:51:13 crc kubenswrapper[4822]: I1201 07:51:13.096861 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" exitCode=0
Dec 01 07:51:13 crc kubenswrapper[4822]: I1201 07:51:13.097247 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f"}
Dec 01 07:51:13 crc kubenswrapper[4822]: I1201 07:51:13.097453 4822 scope.go:117] "RemoveContainer" containerID="57a885c079c73341d6724e1c97563efa990b89a27b8bd077eb8da1976dad0f24"
Dec 01 07:51:13 crc kubenswrapper[4822]: I1201 07:51:13.098101 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f"
Dec 01 07:51:13 crc kubenswrapper[4822]: E1201 07:51:13.098435 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507"
Dec 01 07:51:28 crc kubenswrapper[4822]: I1201 07:51:28.952214 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f"
Dec 01 07:51:28 crc kubenswrapper[4822]: E1201 07:51:28.953508 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507"
Dec 01 07:51:41 crc kubenswrapper[4822]: I1201 07:51:41.950934 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f"
Dec 01 07:51:41 crc kubenswrapper[4822]: E1201 07:51:41.951910 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507"
Dec 01 07:51:54 crc kubenswrapper[4822]: I1201 07:51:54.960005 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f"
Dec 01 07:51:54 crc kubenswrapper[4822]: E1201 07:51:54.961136 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507"
Dec 01 07:52:08 crc kubenswrapper[4822]: I1201 07:52:08.951326 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f"
Dec 01 07:52:08 crc kubenswrapper[4822]: E1201 07:52:08.952166 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed
container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:52:20 crc kubenswrapper[4822]: I1201 07:52:20.950852 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:52:20 crc kubenswrapper[4822]: E1201 07:52:20.951571 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:52:32 crc kubenswrapper[4822]: I1201 07:52:32.951724 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:52:32 crc kubenswrapper[4822]: E1201 07:52:32.952512 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:52:44 crc kubenswrapper[4822]: I1201 07:52:44.961515 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:52:44 crc kubenswrapper[4822]: E1201 07:52:44.962583 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:52:57 crc kubenswrapper[4822]: I1201 07:52:57.951625 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:52:57 crc kubenswrapper[4822]: E1201 07:52:57.952769 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:53:10 crc kubenswrapper[4822]: I1201 07:53:10.951629 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:53:10 crc kubenswrapper[4822]: E1201 07:53:10.952944 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:53:24 crc kubenswrapper[4822]: I1201 07:53:24.956841 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:53:24 crc kubenswrapper[4822]: E1201 07:53:24.957639 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:53:36 crc kubenswrapper[4822]: I1201 07:53:36.950920 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:53:36 crc kubenswrapper[4822]: E1201 07:53:36.951952 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:53:50 crc kubenswrapper[4822]: I1201 07:53:50.951116 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:53:50 crc kubenswrapper[4822]: E1201 07:53:50.951876 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:54:04 crc kubenswrapper[4822]: I1201 07:54:04.959047 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:54:04 crc kubenswrapper[4822]: E1201 07:54:04.960095 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:54:19 crc kubenswrapper[4822]: I1201 07:54:19.951131 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:54:19 crc kubenswrapper[4822]: E1201 07:54:19.952364 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:54:32 crc kubenswrapper[4822]: I1201 07:54:32.951775 4822 
scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:54:32 crc kubenswrapper[4822]: E1201 07:54:32.952983 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:54:45 crc kubenswrapper[4822]: I1201 07:54:45.950773 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:54:45 crc kubenswrapper[4822]: E1201 07:54:45.951968 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:54:59 crc kubenswrapper[4822]: I1201 07:54:59.951183 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:54:59 crc kubenswrapper[4822]: E1201 07:54:59.952037 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:55:14 crc kubenswrapper[4822]: I1201 07:55:14.951470 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:55:14 crc kubenswrapper[4822]: E1201 07:55:14.952578 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:55:29 crc kubenswrapper[4822]: I1201 07:55:29.951052 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:55:29 crc kubenswrapper[4822]: E1201 07:55:29.952212 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:55:40 crc kubenswrapper[4822]: I1201 07:55:40.951962 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:55:40 crc kubenswrapper[4822]: E1201 07:55:40.952956 4822 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:55:55 crc kubenswrapper[4822]: I1201 07:55:55.950414 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:55:55 crc kubenswrapper[4822]: E1201 07:55:55.951145 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:56:07 crc kubenswrapper[4822]: I1201 07:56:07.951654 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:56:07 crc kubenswrapper[4822]: E1201 07:56:07.952655 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.339214 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sbz92"] Dec 01 07:56:18 crc kubenswrapper[4822]: E1201 07:56:18.340121 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="506367d7-a03e-4e67-a3e3-6fe145a43af5" containerName="extract-utilities" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.340137 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="506367d7-a03e-4e67-a3e3-6fe145a43af5" containerName="extract-utilities" Dec 01 07:56:18 crc kubenswrapper[4822]: E1201 07:56:18.340172 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba1c525f-fef3-42d3-89fb-2797884cddb9" containerName="extract-utilities" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.340182 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba1c525f-fef3-42d3-89fb-2797884cddb9" containerName="extract-utilities" Dec 01 07:56:18 crc kubenswrapper[4822]: E1201 07:56:18.340196 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="506367d7-a03e-4e67-a3e3-6fe145a43af5" containerName="registry-server" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.340206 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="506367d7-a03e-4e67-a3e3-6fe145a43af5" containerName="registry-server" Dec 01 07:56:18 crc kubenswrapper[4822]: E1201 07:56:18.340221 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="506367d7-a03e-4e67-a3e3-6fe145a43af5" containerName="extract-content" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.340230 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="506367d7-a03e-4e67-a3e3-6fe145a43af5" containerName="extract-content" Dec 01 07:56:18 crc 
kubenswrapper[4822]: E1201 07:56:18.340246 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba1c525f-fef3-42d3-89fb-2797884cddb9" containerName="extract-content" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.340255 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba1c525f-fef3-42d3-89fb-2797884cddb9" containerName="extract-content" Dec 01 07:56:18 crc kubenswrapper[4822]: E1201 07:56:18.340267 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba1c525f-fef3-42d3-89fb-2797884cddb9" containerName="registry-server" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.340275 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba1c525f-fef3-42d3-89fb-2797884cddb9" containerName="registry-server" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.340473 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="506367d7-a03e-4e67-a3e3-6fe145a43af5" containerName="registry-server" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.340504 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba1c525f-fef3-42d3-89fb-2797884cddb9" containerName="registry-server" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.341878 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.384819 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sbz92"] Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.386128 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6687e482-a7b5-4632-879c-c6b29a8c9324-utilities\") pod \"redhat-operators-sbz92\" (UID: \"6687e482-a7b5-4632-879c-c6b29a8c9324\") " pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.386198 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6687e482-a7b5-4632-879c-c6b29a8c9324-catalog-content\") pod \"redhat-operators-sbz92\" (UID: \"6687e482-a7b5-4632-879c-c6b29a8c9324\") " pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.386241 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z9cd\" (UniqueName: \"kubernetes.io/projected/6687e482-a7b5-4632-879c-c6b29a8c9324-kube-api-access-4z9cd\") pod \"redhat-operators-sbz92\" (UID: \"6687e482-a7b5-4632-879c-c6b29a8c9324\") " pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.487488 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6687e482-a7b5-4632-879c-c6b29a8c9324-catalog-content\") pod \"redhat-operators-sbz92\" (UID: \"6687e482-a7b5-4632-879c-c6b29a8c9324\") " pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.487540 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z9cd\" (UniqueName: \"kubernetes.io/projected/6687e482-a7b5-4632-879c-c6b29a8c9324-kube-api-access-4z9cd\") pod \"redhat-operators-sbz92\" (UID: \"6687e482-a7b5-4632-879c-c6b29a8c9324\") " 
pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.487630 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6687e482-a7b5-4632-879c-c6b29a8c9324-utilities\") pod \"redhat-operators-sbz92\" (UID: \"6687e482-a7b5-4632-879c-c6b29a8c9324\") " pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.488113 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6687e482-a7b5-4632-879c-c6b29a8c9324-catalog-content\") pod \"redhat-operators-sbz92\" (UID: \"6687e482-a7b5-4632-879c-c6b29a8c9324\") " pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.488144 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6687e482-a7b5-4632-879c-c6b29a8c9324-utilities\") pod \"redhat-operators-sbz92\" (UID: \"6687e482-a7b5-4632-879c-c6b29a8c9324\") " pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.507968 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z9cd\" (UniqueName: \"kubernetes.io/projected/6687e482-a7b5-4632-879c-c6b29a8c9324-kube-api-access-4z9cd\") pod \"redhat-operators-sbz92\" (UID: \"6687e482-a7b5-4632-879c-c6b29a8c9324\") " pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:18 crc kubenswrapper[4822]: I1201 07:56:18.688417 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:19 crc kubenswrapper[4822]: I1201 07:56:19.158459 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sbz92"] Dec 01 07:56:20 crc kubenswrapper[4822]: I1201 07:56:20.109441 4822 generic.go:334] "Generic (PLEG): container finished" podID="6687e482-a7b5-4632-879c-c6b29a8c9324" containerID="3d367232f70a68b66ce0885b18520f54222d6df7d49a243388d47f5a3c062a01" exitCode=0 Dec 01 07:56:20 crc kubenswrapper[4822]: I1201 07:56:20.109945 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbz92" event={"ID":"6687e482-a7b5-4632-879c-c6b29a8c9324","Type":"ContainerDied","Data":"3d367232f70a68b66ce0885b18520f54222d6df7d49a243388d47f5a3c062a01"} Dec 01 07:56:20 crc kubenswrapper[4822]: I1201 07:56:20.109988 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbz92" event={"ID":"6687e482-a7b5-4632-879c-c6b29a8c9324","Type":"ContainerStarted","Data":"0b82dd8c4866c3de8f0fac4a958feaed398b67bf14950d81add4722217aa6524"} Dec 01 07:56:20 crc kubenswrapper[4822]: I1201 07:56:20.112638 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:56:21 crc kubenswrapper[4822]: I1201 07:56:21.950898 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 07:56:22 crc kubenswrapper[4822]: I1201 07:56:22.126888 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"1e7876090c2639b4c970893e49b7fbfd032096366e59e94f9018c4aedbda1cfc"} Dec 01 07:56:22 crc 
kubenswrapper[4822]: I1201 07:56:22.129692 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbz92" event={"ID":"6687e482-a7b5-4632-879c-c6b29a8c9324","Type":"ContainerStarted","Data":"abfcfa9e12d3544487071ddd1c4042623260757ed6402a86b9541c2cc6214f58"} Dec 01 07:56:23 crc kubenswrapper[4822]: I1201 07:56:23.143342 4822 generic.go:334] "Generic (PLEG): container finished" podID="6687e482-a7b5-4632-879c-c6b29a8c9324" containerID="abfcfa9e12d3544487071ddd1c4042623260757ed6402a86b9541c2cc6214f58" exitCode=0 Dec 01 07:56:23 crc kubenswrapper[4822]: I1201 07:56:23.143389 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbz92" event={"ID":"6687e482-a7b5-4632-879c-c6b29a8c9324","Type":"ContainerDied","Data":"abfcfa9e12d3544487071ddd1c4042623260757ed6402a86b9541c2cc6214f58"} Dec 01 07:56:24 crc kubenswrapper[4822]: I1201 07:56:24.152604 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbz92" event={"ID":"6687e482-a7b5-4632-879c-c6b29a8c9324","Type":"ContainerStarted","Data":"efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3"} Dec 01 07:56:24 crc kubenswrapper[4822]: I1201 07:56:24.180900 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sbz92" podStartSLOduration=2.642880413 podStartE2EDuration="6.180881796s" podCreationTimestamp="2025-12-01 07:56:18 +0000 UTC" firstStartedPulling="2025-12-01 07:56:20.112213887 +0000 UTC m=+3935.433021603" lastFinishedPulling="2025-12-01 07:56:23.65021526 +0000 UTC m=+3938.971022986" observedRunningTime="2025-12-01 07:56:24.177406197 +0000 UTC m=+3939.498213883" watchObservedRunningTime="2025-12-01 07:56:24.180881796 +0000 UTC m=+3939.501689482" Dec 01 07:56:28 crc kubenswrapper[4822]: I1201 07:56:28.689278 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:28 crc kubenswrapper[4822]: I1201 07:56:28.689948 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:29 crc kubenswrapper[4822]: I1201 07:56:29.724952 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-sbz92" podUID="6687e482-a7b5-4632-879c-c6b29a8c9324" containerName="registry-server" probeResult="failure" output=< Dec 01 07:56:29 crc kubenswrapper[4822]: timeout: failed to connect service ":50051" within 1s Dec 01 07:56:29 crc kubenswrapper[4822]: > Dec 01 07:56:38 crc kubenswrapper[4822]: I1201 07:56:38.765386 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:38 crc kubenswrapper[4822]: I1201 07:56:38.851156 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:39 crc kubenswrapper[4822]: I1201 07:56:39.024658 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sbz92"] Dec 01 07:56:40 crc kubenswrapper[4822]: I1201 07:56:40.289531 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sbz92" podUID="6687e482-a7b5-4632-879c-c6b29a8c9324" containerName="registry-server" containerID="cri-o://efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3" gracePeriod=2 Dec 01 07:56:41 crc 
kubenswrapper[4822]: I1201 07:56:41.113770 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.169653 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6687e482-a7b5-4632-879c-c6b29a8c9324-catalog-content\") pod \"6687e482-a7b5-4632-879c-c6b29a8c9324\" (UID: \"6687e482-a7b5-4632-879c-c6b29a8c9324\") " Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.169714 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4z9cd\" (UniqueName: \"kubernetes.io/projected/6687e482-a7b5-4632-879c-c6b29a8c9324-kube-api-access-4z9cd\") pod \"6687e482-a7b5-4632-879c-c6b29a8c9324\" (UID: \"6687e482-a7b5-4632-879c-c6b29a8c9324\") " Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.169736 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6687e482-a7b5-4632-879c-c6b29a8c9324-utilities\") pod \"6687e482-a7b5-4632-879c-c6b29a8c9324\" (UID: \"6687e482-a7b5-4632-879c-c6b29a8c9324\") " Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.170798 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6687e482-a7b5-4632-879c-c6b29a8c9324-utilities" (OuterVolumeSpecName: "utilities") pod "6687e482-a7b5-4632-879c-c6b29a8c9324" (UID: "6687e482-a7b5-4632-879c-c6b29a8c9324"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.190210 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6687e482-a7b5-4632-879c-c6b29a8c9324-kube-api-access-4z9cd" (OuterVolumeSpecName: "kube-api-access-4z9cd") pod "6687e482-a7b5-4632-879c-c6b29a8c9324" (UID: "6687e482-a7b5-4632-879c-c6b29a8c9324"). InnerVolumeSpecName "kube-api-access-4z9cd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.271465 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4z9cd\" (UniqueName: \"kubernetes.io/projected/6687e482-a7b5-4632-879c-c6b29a8c9324-kube-api-access-4z9cd\") on node \"crc\" DevicePath \"\"" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.271500 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6687e482-a7b5-4632-879c-c6b29a8c9324-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.272077 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6687e482-a7b5-4632-879c-c6b29a8c9324-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6687e482-a7b5-4632-879c-c6b29a8c9324" (UID: "6687e482-a7b5-4632-879c-c6b29a8c9324"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.298602 4822 generic.go:334] "Generic (PLEG): container finished" podID="6687e482-a7b5-4632-879c-c6b29a8c9324" containerID="efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3" exitCode=0 Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.298661 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbz92" event={"ID":"6687e482-a7b5-4632-879c-c6b29a8c9324","Type":"ContainerDied","Data":"efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3"} Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.298690 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbz92" event={"ID":"6687e482-a7b5-4632-879c-c6b29a8c9324","Type":"ContainerDied","Data":"0b82dd8c4866c3de8f0fac4a958feaed398b67bf14950d81add4722217aa6524"} Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.298661 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sbz92" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.298723 4822 scope.go:117] "RemoveContainer" containerID="efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.319334 4822 scope.go:117] "RemoveContainer" containerID="abfcfa9e12d3544487071ddd1c4042623260757ed6402a86b9541c2cc6214f58" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.341583 4822 scope.go:117] "RemoveContainer" containerID="3d367232f70a68b66ce0885b18520f54222d6df7d49a243388d47f5a3c062a01" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.373837 4822 scope.go:117] "RemoveContainer" containerID="efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3" Dec 01 07:56:41 crc kubenswrapper[4822]: E1201 07:56:41.374208 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3\": container with ID starting with efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3 not found: ID does not exist" containerID="efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.374244 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3"} err="failed to get container status \"efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3\": rpc error: code = NotFound desc = could not find container \"efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3\": container with ID starting with efea80cb2b61f7defa4c59902f41dee3fb366914ed1414f70384277a740bd5c3 not found: ID does not exist" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.374269 4822 scope.go:117] "RemoveContainer" containerID="abfcfa9e12d3544487071ddd1c4042623260757ed6402a86b9541c2cc6214f58" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.374410 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6687e482-a7b5-4632-879c-c6b29a8c9324-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:56:41 crc kubenswrapper[4822]: E1201 07:56:41.374578 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"abfcfa9e12d3544487071ddd1c4042623260757ed6402a86b9541c2cc6214f58\": container with ID starting with abfcfa9e12d3544487071ddd1c4042623260757ed6402a86b9541c2cc6214f58 not found: ID does not exist" containerID="abfcfa9e12d3544487071ddd1c4042623260757ed6402a86b9541c2cc6214f58" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.374607 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abfcfa9e12d3544487071ddd1c4042623260757ed6402a86b9541c2cc6214f58"} err="failed to get container status \"abfcfa9e12d3544487071ddd1c4042623260757ed6402a86b9541c2cc6214f58\": rpc error: code = NotFound desc = could not find container \"abfcfa9e12d3544487071ddd1c4042623260757ed6402a86b9541c2cc6214f58\": container with ID starting with abfcfa9e12d3544487071ddd1c4042623260757ed6402a86b9541c2cc6214f58 not found: ID does not exist" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.374627 4822 scope.go:117] "RemoveContainer" containerID="3d367232f70a68b66ce0885b18520f54222d6df7d49a243388d47f5a3c062a01" Dec 01 07:56:41 crc kubenswrapper[4822]: E1201 07:56:41.375744 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d367232f70a68b66ce0885b18520f54222d6df7d49a243388d47f5a3c062a01\": container with ID starting with 3d367232f70a68b66ce0885b18520f54222d6df7d49a243388d47f5a3c062a01 not found: ID does not exist" containerID="3d367232f70a68b66ce0885b18520f54222d6df7d49a243388d47f5a3c062a01" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.375781 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d367232f70a68b66ce0885b18520f54222d6df7d49a243388d47f5a3c062a01"} err="failed to get container status \"3d367232f70a68b66ce0885b18520f54222d6df7d49a243388d47f5a3c062a01\": rpc error: code = NotFound desc = could not find container \"3d367232f70a68b66ce0885b18520f54222d6df7d49a243388d47f5a3c062a01\": container with ID starting with 3d367232f70a68b66ce0885b18520f54222d6df7d49a243388d47f5a3c062a01 not found: ID does not exist" Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.383116 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sbz92"] Dec 01 07:56:41 crc kubenswrapper[4822]: I1201 07:56:41.394480 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sbz92"] Dec 01 07:56:42 crc kubenswrapper[4822]: I1201 07:56:42.964073 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6687e482-a7b5-4632-879c-c6b29a8c9324" path="/var/lib/kubelet/pods/6687e482-a7b5-4632-879c-c6b29a8c9324/volumes" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.326736 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k94rf"] Dec 01 07:56:55 crc kubenswrapper[4822]: E1201 07:56:55.329166 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6687e482-a7b5-4632-879c-c6b29a8c9324" containerName="extract-content" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.329438 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="6687e482-a7b5-4632-879c-c6b29a8c9324" containerName="extract-content" Dec 01 07:56:55 crc kubenswrapper[4822]: E1201 07:56:55.329914 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6687e482-a7b5-4632-879c-c6b29a8c9324" containerName="extract-utilities" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.330049 4822 
state_mem.go:107] "Deleted CPUSet assignment" podUID="6687e482-a7b5-4632-879c-c6b29a8c9324" containerName="extract-utilities" Dec 01 07:56:55 crc kubenswrapper[4822]: E1201 07:56:55.330200 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6687e482-a7b5-4632-879c-c6b29a8c9324" containerName="registry-server" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.330324 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="6687e482-a7b5-4632-879c-c6b29a8c9324" containerName="registry-server" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.330683 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="6687e482-a7b5-4632-879c-c6b29a8c9324" containerName="registry-server" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.332484 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.350004 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k94rf"] Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.381465 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-catalog-content\") pod \"certified-operators-k94rf\" (UID: \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\") " pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.381517 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-utilities\") pod \"certified-operators-k94rf\" (UID: \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\") " pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.381625 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhrdd\" (UniqueName: \"kubernetes.io/projected/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-kube-api-access-zhrdd\") pod \"certified-operators-k94rf\" (UID: \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\") " pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.483440 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-catalog-content\") pod \"certified-operators-k94rf\" (UID: \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\") " pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.483489 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-utilities\") pod \"certified-operators-k94rf\" (UID: \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\") " pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.483528 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhrdd\" (UniqueName: \"kubernetes.io/projected/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-kube-api-access-zhrdd\") pod \"certified-operators-k94rf\" (UID: \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\") " pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:56:55 crc 
kubenswrapper[4822]: I1201 07:56:55.484015 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-utilities\") pod \"certified-operators-k94rf\" (UID: \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\") " pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.484131 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-catalog-content\") pod \"certified-operators-k94rf\" (UID: \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\") " pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.827064 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhrdd\" (UniqueName: \"kubernetes.io/projected/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-kube-api-access-zhrdd\") pod \"certified-operators-k94rf\" (UID: \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\") " pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:56:55 crc kubenswrapper[4822]: I1201 07:56:55.991503 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:56:56 crc kubenswrapper[4822]: I1201 07:56:56.490885 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k94rf"] Dec 01 07:56:57 crc kubenswrapper[4822]: I1201 07:56:57.429527 4822 generic.go:334] "Generic (PLEG): container finished" podID="cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" containerID="38703749df21a6ba1a296f96646e8c299a1a22a1796fc8429c6857df873231fd" exitCode=0 Dec 01 07:56:57 crc kubenswrapper[4822]: I1201 07:56:57.429756 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k94rf" event={"ID":"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24","Type":"ContainerDied","Data":"38703749df21a6ba1a296f96646e8c299a1a22a1796fc8429c6857df873231fd"} Dec 01 07:56:57 crc kubenswrapper[4822]: I1201 07:56:57.430170 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k94rf" event={"ID":"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24","Type":"ContainerStarted","Data":"a9bff59f23de358766dfd032a1427eb59bfecacf716d4e03949d610e948366ed"} Dec 01 07:56:58 crc kubenswrapper[4822]: I1201 07:56:58.441317 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k94rf" event={"ID":"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24","Type":"ContainerStarted","Data":"ca2862dfe908bd63ea8a8de4c32a15913ee8b07cbdf3ad121dfde9518acd9be8"} Dec 01 07:56:59 crc kubenswrapper[4822]: I1201 07:56:59.454596 4822 generic.go:334] "Generic (PLEG): container finished" podID="cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" containerID="ca2862dfe908bd63ea8a8de4c32a15913ee8b07cbdf3ad121dfde9518acd9be8" exitCode=0 Dec 01 07:56:59 crc kubenswrapper[4822]: I1201 07:56:59.454713 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k94rf" event={"ID":"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24","Type":"ContainerDied","Data":"ca2862dfe908bd63ea8a8de4c32a15913ee8b07cbdf3ad121dfde9518acd9be8"} Dec 01 07:57:00 crc kubenswrapper[4822]: I1201 07:57:00.467502 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k94rf" 
event={"ID":"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24","Type":"ContainerStarted","Data":"8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5"} Dec 01 07:57:00 crc kubenswrapper[4822]: I1201 07:57:00.502927 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-k94rf" podStartSLOduration=2.831293035 podStartE2EDuration="5.502907951s" podCreationTimestamp="2025-12-01 07:56:55 +0000 UTC" firstStartedPulling="2025-12-01 07:56:57.431346177 +0000 UTC m=+3972.752153863" lastFinishedPulling="2025-12-01 07:57:00.102961053 +0000 UTC m=+3975.423768779" observedRunningTime="2025-12-01 07:57:00.497744355 +0000 UTC m=+3975.818552091" watchObservedRunningTime="2025-12-01 07:57:00.502907951 +0000 UTC m=+3975.823715647" Dec 01 07:57:05 crc kubenswrapper[4822]: I1201 07:57:05.991892 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:57:05 crc kubenswrapper[4822]: I1201 07:57:05.992321 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:57:06 crc kubenswrapper[4822]: I1201 07:57:06.032625 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:57:06 crc kubenswrapper[4822]: I1201 07:57:06.602451 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:57:06 crc kubenswrapper[4822]: I1201 07:57:06.658756 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k94rf"] Dec 01 07:57:08 crc kubenswrapper[4822]: I1201 07:57:08.550777 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-k94rf" podUID="cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" containerName="registry-server" containerID="cri-o://8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5" gracePeriod=2 Dec 01 07:57:08 crc kubenswrapper[4822]: I1201 07:57:08.961429 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:57:08 crc kubenswrapper[4822]: I1201 07:57:08.986354 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-utilities\") pod \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\" (UID: \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\") " Dec 01 07:57:08 crc kubenswrapper[4822]: I1201 07:57:08.986429 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhrdd\" (UniqueName: \"kubernetes.io/projected/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-kube-api-access-zhrdd\") pod \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\" (UID: \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\") " Dec 01 07:57:08 crc kubenswrapper[4822]: I1201 07:57:08.989422 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-utilities" (OuterVolumeSpecName: "utilities") pod "cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" (UID: "cb547c9b-af1d-4dec-a0c4-77fe1a5bea24"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:57:08 crc kubenswrapper[4822]: I1201 07:57:08.995699 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-kube-api-access-zhrdd" (OuterVolumeSpecName: "kube-api-access-zhrdd") pod "cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" (UID: "cb547c9b-af1d-4dec-a0c4-77fe1a5bea24"). InnerVolumeSpecName "kube-api-access-zhrdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.089178 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-catalog-content\") pod \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\" (UID: \"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24\") " Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.089442 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhrdd\" (UniqueName: \"kubernetes.io/projected/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-kube-api-access-zhrdd\") on node \"crc\" DevicePath \"\"" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.089458 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.139532 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" (UID: "cb547c9b-af1d-4dec-a0c4-77fe1a5bea24"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.190446 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.560870 4822 generic.go:334] "Generic (PLEG): container finished" podID="cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" containerID="8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5" exitCode=0 Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.560914 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k94rf" event={"ID":"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24","Type":"ContainerDied","Data":"8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5"} Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.560942 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k94rf" event={"ID":"cb547c9b-af1d-4dec-a0c4-77fe1a5bea24","Type":"ContainerDied","Data":"a9bff59f23de358766dfd032a1427eb59bfecacf716d4e03949d610e948366ed"} Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.560962 4822 scope.go:117] "RemoveContainer" containerID="8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.561008 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k94rf" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.586376 4822 scope.go:117] "RemoveContainer" containerID="ca2862dfe908bd63ea8a8de4c32a15913ee8b07cbdf3ad121dfde9518acd9be8" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.609780 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k94rf"] Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.613320 4822 scope.go:117] "RemoveContainer" containerID="38703749df21a6ba1a296f96646e8c299a1a22a1796fc8429c6857df873231fd" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.631205 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-k94rf"] Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.655359 4822 scope.go:117] "RemoveContainer" containerID="8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5" Dec 01 07:57:09 crc kubenswrapper[4822]: E1201 07:57:09.655911 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5\": container with ID starting with 8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5 not found: ID does not exist" containerID="8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.655960 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5"} err="failed to get container status \"8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5\": rpc error: code = NotFound desc = could not find container \"8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5\": container with ID starting with 8f73b0f4f7c580e83da1760b1ae30016a40fc7ce952a89f6b8a419a84f2076f5 not found: ID does not exist" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.655998 4822 scope.go:117] "RemoveContainer" containerID="ca2862dfe908bd63ea8a8de4c32a15913ee8b07cbdf3ad121dfde9518acd9be8" Dec 01 07:57:09 crc kubenswrapper[4822]: E1201 07:57:09.657007 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca2862dfe908bd63ea8a8de4c32a15913ee8b07cbdf3ad121dfde9518acd9be8\": container with ID starting with ca2862dfe908bd63ea8a8de4c32a15913ee8b07cbdf3ad121dfde9518acd9be8 not found: ID does not exist" containerID="ca2862dfe908bd63ea8a8de4c32a15913ee8b07cbdf3ad121dfde9518acd9be8" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.657061 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca2862dfe908bd63ea8a8de4c32a15913ee8b07cbdf3ad121dfde9518acd9be8"} err="failed to get container status \"ca2862dfe908bd63ea8a8de4c32a15913ee8b07cbdf3ad121dfde9518acd9be8\": rpc error: code = NotFound desc = could not find container \"ca2862dfe908bd63ea8a8de4c32a15913ee8b07cbdf3ad121dfde9518acd9be8\": container with ID starting with ca2862dfe908bd63ea8a8de4c32a15913ee8b07cbdf3ad121dfde9518acd9be8 not found: ID does not exist" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.657095 4822 scope.go:117] "RemoveContainer" containerID="38703749df21a6ba1a296f96646e8c299a1a22a1796fc8429c6857df873231fd" Dec 01 07:57:09 crc kubenswrapper[4822]: E1201 07:57:09.657474 4822 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"38703749df21a6ba1a296f96646e8c299a1a22a1796fc8429c6857df873231fd\": container with ID starting with 38703749df21a6ba1a296f96646e8c299a1a22a1796fc8429c6857df873231fd not found: ID does not exist" containerID="38703749df21a6ba1a296f96646e8c299a1a22a1796fc8429c6857df873231fd" Dec 01 07:57:09 crc kubenswrapper[4822]: I1201 07:57:09.657513 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38703749df21a6ba1a296f96646e8c299a1a22a1796fc8429c6857df873231fd"} err="failed to get container status \"38703749df21a6ba1a296f96646e8c299a1a22a1796fc8429c6857df873231fd\": rpc error: code = NotFound desc = could not find container \"38703749df21a6ba1a296f96646e8c299a1a22a1796fc8429c6857df873231fd\": container with ID starting with 38703749df21a6ba1a296f96646e8c299a1a22a1796fc8429c6857df873231fd not found: ID does not exist" Dec 01 07:57:10 crc kubenswrapper[4822]: I1201 07:57:10.967283 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" path="/var/lib/kubelet/pods/cb547c9b-af1d-4dec-a0c4-77fe1a5bea24/volumes" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.681250 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-z2b26"] Dec 01 07:57:11 crc kubenswrapper[4822]: E1201 07:57:11.681595 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" containerName="registry-server" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.681610 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" containerName="registry-server" Dec 01 07:57:11 crc kubenswrapper[4822]: E1201 07:57:11.681624 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" containerName="extract-content" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.681632 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" containerName="extract-content" Dec 01 07:57:11 crc kubenswrapper[4822]: E1201 07:57:11.681656 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" containerName="extract-utilities" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.681665 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" containerName="extract-utilities" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.681854 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb547c9b-af1d-4dec-a0c4-77fe1a5bea24" containerName="registry-server" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.683683 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.695919 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z2b26"] Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.832269 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5999b43-5ae5-4e1b-b413-adbf193fbe48-catalog-content\") pod \"redhat-marketplace-z2b26\" (UID: \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\") " pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.832411 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bgdd\" (UniqueName: \"kubernetes.io/projected/d5999b43-5ae5-4e1b-b413-adbf193fbe48-kube-api-access-9bgdd\") pod \"redhat-marketplace-z2b26\" (UID: \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\") " pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.832633 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5999b43-5ae5-4e1b-b413-adbf193fbe48-utilities\") pod \"redhat-marketplace-z2b26\" (UID: \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\") " pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.933991 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5999b43-5ae5-4e1b-b413-adbf193fbe48-utilities\") pod \"redhat-marketplace-z2b26\" (UID: \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\") " pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.934092 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5999b43-5ae5-4e1b-b413-adbf193fbe48-catalog-content\") pod \"redhat-marketplace-z2b26\" (UID: \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\") " pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.934145 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bgdd\" (UniqueName: \"kubernetes.io/projected/d5999b43-5ae5-4e1b-b413-adbf193fbe48-kube-api-access-9bgdd\") pod \"redhat-marketplace-z2b26\" (UID: \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\") " pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.934978 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5999b43-5ae5-4e1b-b413-adbf193fbe48-utilities\") pod \"redhat-marketplace-z2b26\" (UID: \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\") " pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.935253 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5999b43-5ae5-4e1b-b413-adbf193fbe48-catalog-content\") pod \"redhat-marketplace-z2b26\" (UID: \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\") " pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:11 crc kubenswrapper[4822]: I1201 07:57:11.955986 4822 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9bgdd\" (UniqueName: \"kubernetes.io/projected/d5999b43-5ae5-4e1b-b413-adbf193fbe48-kube-api-access-9bgdd\") pod \"redhat-marketplace-z2b26\" (UID: \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\") " pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:12 crc kubenswrapper[4822]: I1201 07:57:12.047227 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:12 crc kubenswrapper[4822]: I1201 07:57:12.480926 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z2b26"] Dec 01 07:57:12 crc kubenswrapper[4822]: W1201 07:57:12.482424 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5999b43_5ae5_4e1b_b413_adbf193fbe48.slice/crio-d31bd3f8edce862f2e21dd2866bd19d18fd0aaf2e63dbbbd74694fd26f05dfc0 WatchSource:0}: Error finding container d31bd3f8edce862f2e21dd2866bd19d18fd0aaf2e63dbbbd74694fd26f05dfc0: Status 404 returned error can't find the container with id d31bd3f8edce862f2e21dd2866bd19d18fd0aaf2e63dbbbd74694fd26f05dfc0 Dec 01 07:57:12 crc kubenswrapper[4822]: I1201 07:57:12.591378 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z2b26" event={"ID":"d5999b43-5ae5-4e1b-b413-adbf193fbe48","Type":"ContainerStarted","Data":"d31bd3f8edce862f2e21dd2866bd19d18fd0aaf2e63dbbbd74694fd26f05dfc0"} Dec 01 07:57:13 crc kubenswrapper[4822]: I1201 07:57:13.604321 4822 generic.go:334] "Generic (PLEG): container finished" podID="d5999b43-5ae5-4e1b-b413-adbf193fbe48" containerID="9d36a29fe4ab379f1bc6bd639c24ecfc0c4b7c92037d06476c5013443f239afb" exitCode=0 Dec 01 07:57:13 crc kubenswrapper[4822]: I1201 07:57:13.604379 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z2b26" event={"ID":"d5999b43-5ae5-4e1b-b413-adbf193fbe48","Type":"ContainerDied","Data":"9d36a29fe4ab379f1bc6bd639c24ecfc0c4b7c92037d06476c5013443f239afb"} Dec 01 07:57:15 crc kubenswrapper[4822]: I1201 07:57:15.622464 4822 generic.go:334] "Generic (PLEG): container finished" podID="d5999b43-5ae5-4e1b-b413-adbf193fbe48" containerID="ffdb550dbce63990c56f5969fa1c9207c8992398e94f767a2eee17d132cc1965" exitCode=0 Dec 01 07:57:15 crc kubenswrapper[4822]: I1201 07:57:15.622615 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z2b26" event={"ID":"d5999b43-5ae5-4e1b-b413-adbf193fbe48","Type":"ContainerDied","Data":"ffdb550dbce63990c56f5969fa1c9207c8992398e94f767a2eee17d132cc1965"} Dec 01 07:57:16 crc kubenswrapper[4822]: I1201 07:57:16.636460 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z2b26" event={"ID":"d5999b43-5ae5-4e1b-b413-adbf193fbe48","Type":"ContainerStarted","Data":"2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712"} Dec 01 07:57:16 crc kubenswrapper[4822]: I1201 07:57:16.665855 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-z2b26" podStartSLOduration=3.260632064 podStartE2EDuration="5.665837082s" podCreationTimestamp="2025-12-01 07:57:11 +0000 UTC" firstStartedPulling="2025-12-01 07:57:13.606540085 +0000 UTC m=+3988.927347811" lastFinishedPulling="2025-12-01 07:57:16.011745143 +0000 UTC m=+3991.332552829" observedRunningTime="2025-12-01 07:57:16.661001305 +0000 UTC m=+3991.981809061" 
watchObservedRunningTime="2025-12-01 07:57:16.665837082 +0000 UTC m=+3991.986644768" Dec 01 07:57:22 crc kubenswrapper[4822]: I1201 07:57:22.047905 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:22 crc kubenswrapper[4822]: I1201 07:57:22.063723 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:22 crc kubenswrapper[4822]: I1201 07:57:22.138437 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:22 crc kubenswrapper[4822]: I1201 07:57:22.758214 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:22 crc kubenswrapper[4822]: I1201 07:57:22.835190 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-z2b26"] Dec 01 07:57:24 crc kubenswrapper[4822]: I1201 07:57:24.708123 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-z2b26" podUID="d5999b43-5ae5-4e1b-b413-adbf193fbe48" containerName="registry-server" containerID="cri-o://2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712" gracePeriod=2 Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.240451 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.406864 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5999b43-5ae5-4e1b-b413-adbf193fbe48-utilities\") pod \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\" (UID: \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\") " Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.407000 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bgdd\" (UniqueName: \"kubernetes.io/projected/d5999b43-5ae5-4e1b-b413-adbf193fbe48-kube-api-access-9bgdd\") pod \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\" (UID: \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\") " Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.407019 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5999b43-5ae5-4e1b-b413-adbf193fbe48-catalog-content\") pod \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\" (UID: \"d5999b43-5ae5-4e1b-b413-adbf193fbe48\") " Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.407703 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5999b43-5ae5-4e1b-b413-adbf193fbe48-utilities" (OuterVolumeSpecName: "utilities") pod "d5999b43-5ae5-4e1b-b413-adbf193fbe48" (UID: "d5999b43-5ae5-4e1b-b413-adbf193fbe48"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.419954 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5999b43-5ae5-4e1b-b413-adbf193fbe48-kube-api-access-9bgdd" (OuterVolumeSpecName: "kube-api-access-9bgdd") pod "d5999b43-5ae5-4e1b-b413-adbf193fbe48" (UID: "d5999b43-5ae5-4e1b-b413-adbf193fbe48"). InnerVolumeSpecName "kube-api-access-9bgdd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.508310 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bgdd\" (UniqueName: \"kubernetes.io/projected/d5999b43-5ae5-4e1b-b413-adbf193fbe48-kube-api-access-9bgdd\") on node \"crc\" DevicePath \"\"" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.508624 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5999b43-5ae5-4e1b-b413-adbf193fbe48-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.518746 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5999b43-5ae5-4e1b-b413-adbf193fbe48-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d5999b43-5ae5-4e1b-b413-adbf193fbe48" (UID: "d5999b43-5ae5-4e1b-b413-adbf193fbe48"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.610210 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5999b43-5ae5-4e1b-b413-adbf193fbe48-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.732898 4822 generic.go:334] "Generic (PLEG): container finished" podID="d5999b43-5ae5-4e1b-b413-adbf193fbe48" containerID="2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712" exitCode=0 Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.732941 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z2b26" event={"ID":"d5999b43-5ae5-4e1b-b413-adbf193fbe48","Type":"ContainerDied","Data":"2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712"} Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.732970 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z2b26" event={"ID":"d5999b43-5ae5-4e1b-b413-adbf193fbe48","Type":"ContainerDied","Data":"d31bd3f8edce862f2e21dd2866bd19d18fd0aaf2e63dbbbd74694fd26f05dfc0"} Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.732992 4822 scope.go:117] "RemoveContainer" containerID="2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.733126 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z2b26" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.772042 4822 scope.go:117] "RemoveContainer" containerID="ffdb550dbce63990c56f5969fa1c9207c8992398e94f767a2eee17d132cc1965" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.780294 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-z2b26"] Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.804632 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-z2b26"] Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.814140 4822 scope.go:117] "RemoveContainer" containerID="9d36a29fe4ab379f1bc6bd639c24ecfc0c4b7c92037d06476c5013443f239afb" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.857726 4822 scope.go:117] "RemoveContainer" containerID="2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712" Dec 01 07:57:25 crc kubenswrapper[4822]: E1201 07:57:25.858110 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712\": container with ID starting with 2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712 not found: ID does not exist" containerID="2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.858141 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712"} err="failed to get container status \"2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712\": rpc error: code = NotFound desc = could not find container \"2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712\": container with ID starting with 2759ff02a1187ca52138f8d18b833728066df171f353dbe762df78f3b531c712 not found: ID does not exist" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.858162 4822 scope.go:117] "RemoveContainer" containerID="ffdb550dbce63990c56f5969fa1c9207c8992398e94f767a2eee17d132cc1965" Dec 01 07:57:25 crc kubenswrapper[4822]: E1201 07:57:25.858427 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffdb550dbce63990c56f5969fa1c9207c8992398e94f767a2eee17d132cc1965\": container with ID starting with ffdb550dbce63990c56f5969fa1c9207c8992398e94f767a2eee17d132cc1965 not found: ID does not exist" containerID="ffdb550dbce63990c56f5969fa1c9207c8992398e94f767a2eee17d132cc1965" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.858455 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffdb550dbce63990c56f5969fa1c9207c8992398e94f767a2eee17d132cc1965"} err="failed to get container status \"ffdb550dbce63990c56f5969fa1c9207c8992398e94f767a2eee17d132cc1965\": rpc error: code = NotFound desc = could not find container \"ffdb550dbce63990c56f5969fa1c9207c8992398e94f767a2eee17d132cc1965\": container with ID starting with ffdb550dbce63990c56f5969fa1c9207c8992398e94f767a2eee17d132cc1965 not found: ID does not exist" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.858471 4822 scope.go:117] "RemoveContainer" containerID="9d36a29fe4ab379f1bc6bd639c24ecfc0c4b7c92037d06476c5013443f239afb" Dec 01 07:57:25 crc kubenswrapper[4822]: E1201 07:57:25.858853 4822 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9d36a29fe4ab379f1bc6bd639c24ecfc0c4b7c92037d06476c5013443f239afb\": container with ID starting with 9d36a29fe4ab379f1bc6bd639c24ecfc0c4b7c92037d06476c5013443f239afb not found: ID does not exist" containerID="9d36a29fe4ab379f1bc6bd639c24ecfc0c4b7c92037d06476c5013443f239afb" Dec 01 07:57:25 crc kubenswrapper[4822]: I1201 07:57:25.858879 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d36a29fe4ab379f1bc6bd639c24ecfc0c4b7c92037d06476c5013443f239afb"} err="failed to get container status \"9d36a29fe4ab379f1bc6bd639c24ecfc0c4b7c92037d06476c5013443f239afb\": rpc error: code = NotFound desc = could not find container \"9d36a29fe4ab379f1bc6bd639c24ecfc0c4b7c92037d06476c5013443f239afb\": container with ID starting with 9d36a29fe4ab379f1bc6bd639c24ecfc0c4b7c92037d06476c5013443f239afb not found: ID does not exist" Dec 01 07:57:26 crc kubenswrapper[4822]: I1201 07:57:26.966828 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5999b43-5ae5-4e1b-b413-adbf193fbe48" path="/var/lib/kubelet/pods/d5999b43-5ae5-4e1b-b413-adbf193fbe48/volumes" Dec 01 07:58:42 crc kubenswrapper[4822]: I1201 07:58:42.542795 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:58:42 crc kubenswrapper[4822]: I1201 07:58:42.543375 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:59:12 crc kubenswrapper[4822]: I1201 07:59:12.542417 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:59:12 crc kubenswrapper[4822]: I1201 07:59:12.543169 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:59:42 crc kubenswrapper[4822]: I1201 07:59:42.543030 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:59:42 crc kubenswrapper[4822]: I1201 07:59:42.543738 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:59:42 crc kubenswrapper[4822]: I1201 07:59:42.543818 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 07:59:42 crc kubenswrapper[4822]: I1201 07:59:42.544840 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1e7876090c2639b4c970893e49b7fbfd032096366e59e94f9018c4aedbda1cfc"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:59:42 crc kubenswrapper[4822]: I1201 07:59:42.544934 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://1e7876090c2639b4c970893e49b7fbfd032096366e59e94f9018c4aedbda1cfc" gracePeriod=600 Dec 01 07:59:43 crc kubenswrapper[4822]: I1201 07:59:43.118473 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="1e7876090c2639b4c970893e49b7fbfd032096366e59e94f9018c4aedbda1cfc" exitCode=0 Dec 01 07:59:43 crc kubenswrapper[4822]: I1201 07:59:43.118600 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"1e7876090c2639b4c970893e49b7fbfd032096366e59e94f9018c4aedbda1cfc"} Dec 01 07:59:43 crc kubenswrapper[4822]: I1201 07:59:43.118779 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912"} Dec 01 07:59:43 crc kubenswrapper[4822]: I1201 07:59:43.118812 4822 scope.go:117] "RemoveContainer" containerID="2e93eb40d34b2f314caf66124b5d8b96591cd49e1deeb3d663acc261312ec94f" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.227326 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8"] Dec 01 08:00:00 crc kubenswrapper[4822]: E1201 08:00:00.231254 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5999b43-5ae5-4e1b-b413-adbf193fbe48" containerName="extract-content" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.231277 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5999b43-5ae5-4e1b-b413-adbf193fbe48" containerName="extract-content" Dec 01 08:00:00 crc kubenswrapper[4822]: E1201 08:00:00.231312 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5999b43-5ae5-4e1b-b413-adbf193fbe48" containerName="registry-server" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.231323 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5999b43-5ae5-4e1b-b413-adbf193fbe48" containerName="registry-server" Dec 01 08:00:00 crc kubenswrapper[4822]: E1201 08:00:00.231349 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5999b43-5ae5-4e1b-b413-adbf193fbe48" containerName="extract-utilities" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.231357 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5999b43-5ae5-4e1b-b413-adbf193fbe48" containerName="extract-utilities" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.231631 4822 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d5999b43-5ae5-4e1b-b413-adbf193fbe48" containerName="registry-server" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.232253 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.233585 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8"] Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.234776 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.235054 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.370798 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bd5068c-f0ed-4097-90b6-94bc784cffac-config-volume\") pod \"collect-profiles-29409600-bvrn8\" (UID: \"7bd5068c-f0ed-4097-90b6-94bc784cffac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.371228 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bd5068c-f0ed-4097-90b6-94bc784cffac-secret-volume\") pod \"collect-profiles-29409600-bvrn8\" (UID: \"7bd5068c-f0ed-4097-90b6-94bc784cffac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.371272 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7gzf\" (UniqueName: \"kubernetes.io/projected/7bd5068c-f0ed-4097-90b6-94bc784cffac-kube-api-access-w7gzf\") pod \"collect-profiles-29409600-bvrn8\" (UID: \"7bd5068c-f0ed-4097-90b6-94bc784cffac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.473191 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bd5068c-f0ed-4097-90b6-94bc784cffac-secret-volume\") pod \"collect-profiles-29409600-bvrn8\" (UID: \"7bd5068c-f0ed-4097-90b6-94bc784cffac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.473256 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7gzf\" (UniqueName: \"kubernetes.io/projected/7bd5068c-f0ed-4097-90b6-94bc784cffac-kube-api-access-w7gzf\") pod \"collect-profiles-29409600-bvrn8\" (UID: \"7bd5068c-f0ed-4097-90b6-94bc784cffac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.474477 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bd5068c-f0ed-4097-90b6-94bc784cffac-config-volume\") pod \"collect-profiles-29409600-bvrn8\" (UID: \"7bd5068c-f0ed-4097-90b6-94bc784cffac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.475387 
4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bd5068c-f0ed-4097-90b6-94bc784cffac-config-volume\") pod \"collect-profiles-29409600-bvrn8\" (UID: \"7bd5068c-f0ed-4097-90b6-94bc784cffac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.479865 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bd5068c-f0ed-4097-90b6-94bc784cffac-secret-volume\") pod \"collect-profiles-29409600-bvrn8\" (UID: \"7bd5068c-f0ed-4097-90b6-94bc784cffac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.501812 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7gzf\" (UniqueName: \"kubernetes.io/projected/7bd5068c-f0ed-4097-90b6-94bc784cffac-kube-api-access-w7gzf\") pod \"collect-profiles-29409600-bvrn8\" (UID: \"7bd5068c-f0ed-4097-90b6-94bc784cffac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:00 crc kubenswrapper[4822]: I1201 08:00:00.561249 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:01 crc kubenswrapper[4822]: I1201 08:00:01.117485 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8"] Dec 01 08:00:01 crc kubenswrapper[4822]: I1201 08:00:01.299251 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" event={"ID":"7bd5068c-f0ed-4097-90b6-94bc784cffac","Type":"ContainerStarted","Data":"c36ab8cb778c6dec7ce4e493733cac466838daa960c704413ea4ec58b407b024"} Dec 01 08:00:01 crc kubenswrapper[4822]: I1201 08:00:01.299534 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" event={"ID":"7bd5068c-f0ed-4097-90b6-94bc784cffac","Type":"ContainerStarted","Data":"ebc242dcc72620f8a9cca10deed617c975350ada029a1df330de1605f061ea39"} Dec 01 08:00:01 crc kubenswrapper[4822]: I1201 08:00:01.319790 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" podStartSLOduration=1.319771089 podStartE2EDuration="1.319771089s" podCreationTimestamp="2025-12-01 08:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 08:00:01.315693803 +0000 UTC m=+4156.636501489" watchObservedRunningTime="2025-12-01 08:00:01.319771089 +0000 UTC m=+4156.640578775" Dec 01 08:00:02 crc kubenswrapper[4822]: I1201 08:00:02.309778 4822 generic.go:334] "Generic (PLEG): container finished" podID="7bd5068c-f0ed-4097-90b6-94bc784cffac" containerID="c36ab8cb778c6dec7ce4e493733cac466838daa960c704413ea4ec58b407b024" exitCode=0 Dec 01 08:00:02 crc kubenswrapper[4822]: I1201 08:00:02.309866 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" event={"ID":"7bd5068c-f0ed-4097-90b6-94bc784cffac","Type":"ContainerDied","Data":"c36ab8cb778c6dec7ce4e493733cac466838daa960c704413ea4ec58b407b024"} Dec 01 08:00:03 crc kubenswrapper[4822]: I1201 08:00:03.691461 4822 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:03 crc kubenswrapper[4822]: I1201 08:00:03.852880 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7gzf\" (UniqueName: \"kubernetes.io/projected/7bd5068c-f0ed-4097-90b6-94bc784cffac-kube-api-access-w7gzf\") pod \"7bd5068c-f0ed-4097-90b6-94bc784cffac\" (UID: \"7bd5068c-f0ed-4097-90b6-94bc784cffac\") " Dec 01 08:00:03 crc kubenswrapper[4822]: I1201 08:00:03.853312 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bd5068c-f0ed-4097-90b6-94bc784cffac-secret-volume\") pod \"7bd5068c-f0ed-4097-90b6-94bc784cffac\" (UID: \"7bd5068c-f0ed-4097-90b6-94bc784cffac\") " Dec 01 08:00:03 crc kubenswrapper[4822]: I1201 08:00:03.854344 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bd5068c-f0ed-4097-90b6-94bc784cffac-config-volume\") pod \"7bd5068c-f0ed-4097-90b6-94bc784cffac\" (UID: \"7bd5068c-f0ed-4097-90b6-94bc784cffac\") " Dec 01 08:00:03 crc kubenswrapper[4822]: I1201 08:00:03.855033 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bd5068c-f0ed-4097-90b6-94bc784cffac-config-volume" (OuterVolumeSpecName: "config-volume") pod "7bd5068c-f0ed-4097-90b6-94bc784cffac" (UID: "7bd5068c-f0ed-4097-90b6-94bc784cffac"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 08:00:03 crc kubenswrapper[4822]: I1201 08:00:03.861258 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bd5068c-f0ed-4097-90b6-94bc784cffac-kube-api-access-w7gzf" (OuterVolumeSpecName: "kube-api-access-w7gzf") pod "7bd5068c-f0ed-4097-90b6-94bc784cffac" (UID: "7bd5068c-f0ed-4097-90b6-94bc784cffac"). InnerVolumeSpecName "kube-api-access-w7gzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:00:03 crc kubenswrapper[4822]: I1201 08:00:03.861662 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bd5068c-f0ed-4097-90b6-94bc784cffac-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7bd5068c-f0ed-4097-90b6-94bc784cffac" (UID: "7bd5068c-f0ed-4097-90b6-94bc784cffac"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 08:00:03 crc kubenswrapper[4822]: I1201 08:00:03.955404 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7gzf\" (UniqueName: \"kubernetes.io/projected/7bd5068c-f0ed-4097-90b6-94bc784cffac-kube-api-access-w7gzf\") on node \"crc\" DevicePath \"\"" Dec 01 08:00:03 crc kubenswrapper[4822]: I1201 08:00:03.955441 4822 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bd5068c-f0ed-4097-90b6-94bc784cffac-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 08:00:03 crc kubenswrapper[4822]: I1201 08:00:03.955460 4822 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bd5068c-f0ed-4097-90b6-94bc784cffac-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 08:00:04 crc kubenswrapper[4822]: I1201 08:00:04.332427 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" event={"ID":"7bd5068c-f0ed-4097-90b6-94bc784cffac","Type":"ContainerDied","Data":"ebc242dcc72620f8a9cca10deed617c975350ada029a1df330de1605f061ea39"} Dec 01 08:00:04 crc kubenswrapper[4822]: I1201 08:00:04.332510 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebc242dcc72620f8a9cca10deed617c975350ada029a1df330de1605f061ea39" Dec 01 08:00:04 crc kubenswrapper[4822]: I1201 08:00:04.332679 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409600-bvrn8" Dec 01 08:00:04 crc kubenswrapper[4822]: I1201 08:00:04.428300 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz"] Dec 01 08:00:04 crc kubenswrapper[4822]: I1201 08:00:04.437377 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409555-z4lqz"] Dec 01 08:00:04 crc kubenswrapper[4822]: I1201 08:00:04.971453 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1033bda-45cf-46f7-b21d-1d12a4a4a33a" path="/var/lib/kubelet/pods/a1033bda-45cf-46f7-b21d-1d12a4a4a33a/volumes" Dec 01 08:00:51 crc kubenswrapper[4822]: I1201 08:00:51.161080 4822 scope.go:117] "RemoveContainer" containerID="60a6a784e18bcd7c87b9230c59ddc67e16647a6e9721f0b41d5b853a838fa70a" Dec 01 08:01:17 crc kubenswrapper[4822]: I1201 08:01:17.987811 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2gb8t"] Dec 01 08:01:17 crc kubenswrapper[4822]: E1201 08:01:17.988927 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bd5068c-f0ed-4097-90b6-94bc784cffac" containerName="collect-profiles" Dec 01 08:01:17 crc kubenswrapper[4822]: I1201 08:01:17.988949 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bd5068c-f0ed-4097-90b6-94bc784cffac" containerName="collect-profiles" Dec 01 08:01:17 crc kubenswrapper[4822]: I1201 08:01:17.989255 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bd5068c-f0ed-4097-90b6-94bc784cffac" containerName="collect-profiles" Dec 01 08:01:17 crc kubenswrapper[4822]: I1201 08:01:17.991009 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:17 crc kubenswrapper[4822]: I1201 08:01:17.995986 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2gb8t"] Dec 01 08:01:18 crc kubenswrapper[4822]: I1201 08:01:18.039143 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-utilities\") pod \"community-operators-2gb8t\" (UID: \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\") " pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:18 crc kubenswrapper[4822]: I1201 08:01:18.039387 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ljl6\" (UniqueName: \"kubernetes.io/projected/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-kube-api-access-4ljl6\") pod \"community-operators-2gb8t\" (UID: \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\") " pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:18 crc kubenswrapper[4822]: I1201 08:01:18.040247 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-catalog-content\") pod \"community-operators-2gb8t\" (UID: \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\") " pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:18 crc kubenswrapper[4822]: I1201 08:01:18.140989 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ljl6\" (UniqueName: \"kubernetes.io/projected/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-kube-api-access-4ljl6\") pod \"community-operators-2gb8t\" (UID: \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\") " pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:18 crc kubenswrapper[4822]: I1201 08:01:18.141133 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-catalog-content\") pod \"community-operators-2gb8t\" (UID: \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\") " pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:18 crc kubenswrapper[4822]: I1201 08:01:18.141778 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-catalog-content\") pod \"community-operators-2gb8t\" (UID: \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\") " pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:18 crc kubenswrapper[4822]: I1201 08:01:18.141881 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-utilities\") pod \"community-operators-2gb8t\" (UID: \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\") " pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:18 crc kubenswrapper[4822]: I1201 08:01:18.142591 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-utilities\") pod \"community-operators-2gb8t\" (UID: \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\") " pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:18 crc kubenswrapper[4822]: I1201 08:01:18.164746 4822 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4ljl6\" (UniqueName: \"kubernetes.io/projected/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-kube-api-access-4ljl6\") pod \"community-operators-2gb8t\" (UID: \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\") " pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:18 crc kubenswrapper[4822]: I1201 08:01:18.326599 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:18 crc kubenswrapper[4822]: I1201 08:01:18.790248 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2gb8t"] Dec 01 08:01:19 crc kubenswrapper[4822]: I1201 08:01:19.029598 4822 generic.go:334] "Generic (PLEG): container finished" podID="df6a0b6f-b0b2-46ab-a164-4f4479ad5919" containerID="5aeda3211f2754f82306751dcdeac2acf16beda1f82b965b3019de3d7975e78f" exitCode=0 Dec 01 08:01:19 crc kubenswrapper[4822]: I1201 08:01:19.029668 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gb8t" event={"ID":"df6a0b6f-b0b2-46ab-a164-4f4479ad5919","Type":"ContainerDied","Data":"5aeda3211f2754f82306751dcdeac2acf16beda1f82b965b3019de3d7975e78f"} Dec 01 08:01:19 crc kubenswrapper[4822]: I1201 08:01:19.030043 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gb8t" event={"ID":"df6a0b6f-b0b2-46ab-a164-4f4479ad5919","Type":"ContainerStarted","Data":"4adbd12f68bc9225a1adf0ae8234dddc2aa8f67f5d5130f2ee667124620111a5"} Dec 01 08:01:21 crc kubenswrapper[4822]: I1201 08:01:21.054469 4822 generic.go:334] "Generic (PLEG): container finished" podID="df6a0b6f-b0b2-46ab-a164-4f4479ad5919" containerID="c60943aad7342b356de11399edf8dbb1b14c064cfc2288b564b1d5ad62690306" exitCode=0 Dec 01 08:01:21 crc kubenswrapper[4822]: I1201 08:01:21.054620 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gb8t" event={"ID":"df6a0b6f-b0b2-46ab-a164-4f4479ad5919","Type":"ContainerDied","Data":"c60943aad7342b356de11399edf8dbb1b14c064cfc2288b564b1d5ad62690306"} Dec 01 08:01:21 crc kubenswrapper[4822]: I1201 08:01:21.057103 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 08:01:23 crc kubenswrapper[4822]: I1201 08:01:23.073204 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gb8t" event={"ID":"df6a0b6f-b0b2-46ab-a164-4f4479ad5919","Type":"ContainerStarted","Data":"3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627"} Dec 01 08:01:23 crc kubenswrapper[4822]: I1201 08:01:23.104931 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2gb8t" podStartSLOduration=3.003038731 podStartE2EDuration="6.104909159s" podCreationTimestamp="2025-12-01 08:01:17 +0000 UTC" firstStartedPulling="2025-12-01 08:01:19.031055424 +0000 UTC m=+4234.351863110" lastFinishedPulling="2025-12-01 08:01:22.132925812 +0000 UTC m=+4237.453733538" observedRunningTime="2025-12-01 08:01:23.096459613 +0000 UTC m=+4238.417267299" watchObservedRunningTime="2025-12-01 08:01:23.104909159 +0000 UTC m=+4238.425716855" Dec 01 08:01:28 crc kubenswrapper[4822]: I1201 08:01:28.327771 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:28 crc kubenswrapper[4822]: I1201 08:01:28.328236 4822 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:28 crc kubenswrapper[4822]: I1201 08:01:28.386339 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:29 crc kubenswrapper[4822]: I1201 08:01:29.181250 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:31 crc kubenswrapper[4822]: I1201 08:01:31.757143 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2gb8t"] Dec 01 08:01:31 crc kubenswrapper[4822]: I1201 08:01:31.757868 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2gb8t" podUID="df6a0b6f-b0b2-46ab-a164-4f4479ad5919" containerName="registry-server" containerID="cri-o://3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627" gracePeriod=2 Dec 01 08:01:32 crc kubenswrapper[4822]: I1201 08:01:32.718018 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:32 crc kubenswrapper[4822]: I1201 08:01:32.908466 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-utilities\") pod \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\" (UID: \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\") " Dec 01 08:01:32 crc kubenswrapper[4822]: I1201 08:01:32.908562 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-catalog-content\") pod \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\" (UID: \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\") " Dec 01 08:01:32 crc kubenswrapper[4822]: I1201 08:01:32.908643 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ljl6\" (UniqueName: \"kubernetes.io/projected/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-kube-api-access-4ljl6\") pod \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\" (UID: \"df6a0b6f-b0b2-46ab-a164-4f4479ad5919\") " Dec 01 08:01:32 crc kubenswrapper[4822]: I1201 08:01:32.909670 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-utilities" (OuterVolumeSpecName: "utilities") pod "df6a0b6f-b0b2-46ab-a164-4f4479ad5919" (UID: "df6a0b6f-b0b2-46ab-a164-4f4479ad5919"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:01:32 crc kubenswrapper[4822]: I1201 08:01:32.919850 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-kube-api-access-4ljl6" (OuterVolumeSpecName: "kube-api-access-4ljl6") pod "df6a0b6f-b0b2-46ab-a164-4f4479ad5919" (UID: "df6a0b6f-b0b2-46ab-a164-4f4479ad5919"). InnerVolumeSpecName "kube-api-access-4ljl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:01:32 crc kubenswrapper[4822]: I1201 08:01:32.955527 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df6a0b6f-b0b2-46ab-a164-4f4479ad5919" (UID: "df6a0b6f-b0b2-46ab-a164-4f4479ad5919"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.010519 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.010578 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.010594 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ljl6\" (UniqueName: \"kubernetes.io/projected/df6a0b6f-b0b2-46ab-a164-4f4479ad5919-kube-api-access-4ljl6\") on node \"crc\" DevicePath \"\"" Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.165429 4822 generic.go:334] "Generic (PLEG): container finished" podID="df6a0b6f-b0b2-46ab-a164-4f4479ad5919" containerID="3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627" exitCode=0 Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.165518 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2gb8t" Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.165601 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gb8t" event={"ID":"df6a0b6f-b0b2-46ab-a164-4f4479ad5919","Type":"ContainerDied","Data":"3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627"} Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.166721 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2gb8t" event={"ID":"df6a0b6f-b0b2-46ab-a164-4f4479ad5919","Type":"ContainerDied","Data":"4adbd12f68bc9225a1adf0ae8234dddc2aa8f67f5d5130f2ee667124620111a5"} Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.166823 4822 scope.go:117] "RemoveContainer" containerID="3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627" Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.192219 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2gb8t"] Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.194049 4822 scope.go:117] "RemoveContainer" containerID="c60943aad7342b356de11399edf8dbb1b14c064cfc2288b564b1d5ad62690306" Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.197885 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2gb8t"] Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.231321 4822 scope.go:117] "RemoveContainer" containerID="5aeda3211f2754f82306751dcdeac2acf16beda1f82b965b3019de3d7975e78f" Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.254663 4822 scope.go:117] "RemoveContainer" containerID="3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627" Dec 01 08:01:33 crc kubenswrapper[4822]: E1201 08:01:33.255199 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627\": container with ID starting with 3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627 not found: ID does not exist" containerID="3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627" Dec 01 08:01:33 crc 
kubenswrapper[4822]: I1201 08:01:33.255248 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627"} err="failed to get container status \"3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627\": rpc error: code = NotFound desc = could not find container \"3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627\": container with ID starting with 3d1a5db4563c3ff495150e116d5efe748a098169ef130d84739e540097e7b627 not found: ID does not exist" Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.255277 4822 scope.go:117] "RemoveContainer" containerID="c60943aad7342b356de11399edf8dbb1b14c064cfc2288b564b1d5ad62690306" Dec 01 08:01:33 crc kubenswrapper[4822]: E1201 08:01:33.255662 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c60943aad7342b356de11399edf8dbb1b14c064cfc2288b564b1d5ad62690306\": container with ID starting with c60943aad7342b356de11399edf8dbb1b14c064cfc2288b564b1d5ad62690306 not found: ID does not exist" containerID="c60943aad7342b356de11399edf8dbb1b14c064cfc2288b564b1d5ad62690306" Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.255719 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c60943aad7342b356de11399edf8dbb1b14c064cfc2288b564b1d5ad62690306"} err="failed to get container status \"c60943aad7342b356de11399edf8dbb1b14c064cfc2288b564b1d5ad62690306\": rpc error: code = NotFound desc = could not find container \"c60943aad7342b356de11399edf8dbb1b14c064cfc2288b564b1d5ad62690306\": container with ID starting with c60943aad7342b356de11399edf8dbb1b14c064cfc2288b564b1d5ad62690306 not found: ID does not exist" Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.255756 4822 scope.go:117] "RemoveContainer" containerID="5aeda3211f2754f82306751dcdeac2acf16beda1f82b965b3019de3d7975e78f" Dec 01 08:01:33 crc kubenswrapper[4822]: E1201 08:01:33.256083 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5aeda3211f2754f82306751dcdeac2acf16beda1f82b965b3019de3d7975e78f\": container with ID starting with 5aeda3211f2754f82306751dcdeac2acf16beda1f82b965b3019de3d7975e78f not found: ID does not exist" containerID="5aeda3211f2754f82306751dcdeac2acf16beda1f82b965b3019de3d7975e78f" Dec 01 08:01:33 crc kubenswrapper[4822]: I1201 08:01:33.256119 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5aeda3211f2754f82306751dcdeac2acf16beda1f82b965b3019de3d7975e78f"} err="failed to get container status \"5aeda3211f2754f82306751dcdeac2acf16beda1f82b965b3019de3d7975e78f\": rpc error: code = NotFound desc = could not find container \"5aeda3211f2754f82306751dcdeac2acf16beda1f82b965b3019de3d7975e78f\": container with ID starting with 5aeda3211f2754f82306751dcdeac2acf16beda1f82b965b3019de3d7975e78f not found: ID does not exist" Dec 01 08:01:34 crc kubenswrapper[4822]: I1201 08:01:34.968906 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df6a0b6f-b0b2-46ab-a164-4f4479ad5919" path="/var/lib/kubelet/pods/df6a0b6f-b0b2-46ab-a164-4f4479ad5919/volumes" Dec 01 08:01:42 crc kubenswrapper[4822]: I1201 08:01:42.543014 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:01:42 crc kubenswrapper[4822]: I1201 08:01:42.543923 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:02:12 crc kubenswrapper[4822]: I1201 08:02:12.542779 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:02:12 crc kubenswrapper[4822]: I1201 08:02:12.543687 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:02:42 crc kubenswrapper[4822]: I1201 08:02:42.542748 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:02:42 crc kubenswrapper[4822]: I1201 08:02:42.543409 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:02:42 crc kubenswrapper[4822]: I1201 08:02:42.543480 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 08:02:42 crc kubenswrapper[4822]: I1201 08:02:42.544363 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 08:02:42 crc kubenswrapper[4822]: I1201 08:02:42.544471 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" gracePeriod=600 Dec 01 08:02:42 crc kubenswrapper[4822]: E1201 08:02:42.675680 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:02:42 crc 
kubenswrapper[4822]: I1201 08:02:42.978039 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" exitCode=0 Dec 01 08:02:42 crc kubenswrapper[4822]: I1201 08:02:42.978128 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912"} Dec 01 08:02:42 crc kubenswrapper[4822]: I1201 08:02:42.978196 4822 scope.go:117] "RemoveContainer" containerID="1e7876090c2639b4c970893e49b7fbfd032096366e59e94f9018c4aedbda1cfc" Dec 01 08:02:42 crc kubenswrapper[4822]: I1201 08:02:42.979821 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:02:42 crc kubenswrapper[4822]: E1201 08:02:42.980611 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:02:53 crc kubenswrapper[4822]: I1201 08:02:53.951362 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:02:53 crc kubenswrapper[4822]: E1201 08:02:53.952467 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:03:06 crc kubenswrapper[4822]: I1201 08:03:06.951380 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:03:06 crc kubenswrapper[4822]: E1201 08:03:06.954127 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:03:19 crc kubenswrapper[4822]: I1201 08:03:19.951521 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:03:19 crc kubenswrapper[4822]: E1201 08:03:19.952485 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:03:32 crc kubenswrapper[4822]: I1201 08:03:32.951438 4822 scope.go:117] "RemoveContainer" 
containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:03:32 crc kubenswrapper[4822]: E1201 08:03:32.952978 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:03:45 crc kubenswrapper[4822]: I1201 08:03:45.951152 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:03:45 crc kubenswrapper[4822]: E1201 08:03:45.952534 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:04:00 crc kubenswrapper[4822]: I1201 08:04:00.951505 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:04:00 crc kubenswrapper[4822]: E1201 08:04:00.952251 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:04:14 crc kubenswrapper[4822]: I1201 08:04:14.962397 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:04:14 crc kubenswrapper[4822]: E1201 08:04:14.963778 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:04:29 crc kubenswrapper[4822]: I1201 08:04:29.950931 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:04:29 crc kubenswrapper[4822]: E1201 08:04:29.951735 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:04:43 crc kubenswrapper[4822]: I1201 08:04:43.950721 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:04:43 crc kubenswrapper[4822]: E1201 08:04:43.951973 4822 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:04:57 crc kubenswrapper[4822]: I1201 08:04:57.950736 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:04:57 crc kubenswrapper[4822]: E1201 08:04:57.951444 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:05:12 crc kubenswrapper[4822]: I1201 08:05:12.952000 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:05:12 crc kubenswrapper[4822]: E1201 08:05:12.952935 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:05:25 crc kubenswrapper[4822]: I1201 08:05:25.950842 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:05:25 crc kubenswrapper[4822]: E1201 08:05:25.951615 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:05:36 crc kubenswrapper[4822]: I1201 08:05:36.951864 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:05:36 crc kubenswrapper[4822]: E1201 08:05:36.953574 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:05:48 crc kubenswrapper[4822]: I1201 08:05:48.956784 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:05:48 crc kubenswrapper[4822]: E1201 08:05:48.957619 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:06:01 crc kubenswrapper[4822]: I1201 08:06:01.950682 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:06:01 crc kubenswrapper[4822]: E1201 08:06:01.951876 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:06:12 crc kubenswrapper[4822]: I1201 08:06:12.952603 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:06:12 crc kubenswrapper[4822]: E1201 08:06:12.953526 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:06:23 crc kubenswrapper[4822]: I1201 08:06:23.950849 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:06:23 crc kubenswrapper[4822]: E1201 08:06:23.951478 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:06:37 crc kubenswrapper[4822]: I1201 08:06:37.951419 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:06:37 crc kubenswrapper[4822]: E1201 08:06:37.952377 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:06:48 crc kubenswrapper[4822]: I1201 08:06:48.951330 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:06:48 crc kubenswrapper[4822]: E1201 08:06:48.952412 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" 
podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:07:00 crc kubenswrapper[4822]: I1201 08:07:00.951138 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:07:00 crc kubenswrapper[4822]: E1201 08:07:00.952042 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.528474 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-p2z92"] Dec 01 08:07:14 crc kubenswrapper[4822]: E1201 08:07:14.529499 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df6a0b6f-b0b2-46ab-a164-4f4479ad5919" containerName="extract-content" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.529531 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="df6a0b6f-b0b2-46ab-a164-4f4479ad5919" containerName="extract-content" Dec 01 08:07:14 crc kubenswrapper[4822]: E1201 08:07:14.529624 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df6a0b6f-b0b2-46ab-a164-4f4479ad5919" containerName="extract-utilities" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.529638 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="df6a0b6f-b0b2-46ab-a164-4f4479ad5919" containerName="extract-utilities" Dec 01 08:07:14 crc kubenswrapper[4822]: E1201 08:07:14.529660 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df6a0b6f-b0b2-46ab-a164-4f4479ad5919" containerName="registry-server" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.529671 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="df6a0b6f-b0b2-46ab-a164-4f4479ad5919" containerName="registry-server" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.530005 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="df6a0b6f-b0b2-46ab-a164-4f4479ad5919" containerName="registry-server" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.531584 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.537834 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a567dd1-b46d-4b04-bf6c-49f50b59a595-utilities\") pod \"certified-operators-p2z92\" (UID: \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\") " pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.537996 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a567dd1-b46d-4b04-bf6c-49f50b59a595-catalog-content\") pod \"certified-operators-p2z92\" (UID: \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\") " pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.538041 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvmt7\" (UniqueName: \"kubernetes.io/projected/4a567dd1-b46d-4b04-bf6c-49f50b59a595-kube-api-access-fvmt7\") pod \"certified-operators-p2z92\" (UID: \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\") " pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.565799 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p2z92"] Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.638854 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a567dd1-b46d-4b04-bf6c-49f50b59a595-utilities\") pod \"certified-operators-p2z92\" (UID: \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\") " pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.638972 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a567dd1-b46d-4b04-bf6c-49f50b59a595-catalog-content\") pod \"certified-operators-p2z92\" (UID: \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\") " pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.638998 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvmt7\" (UniqueName: \"kubernetes.io/projected/4a567dd1-b46d-4b04-bf6c-49f50b59a595-kube-api-access-fvmt7\") pod \"certified-operators-p2z92\" (UID: \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\") " pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.639842 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a567dd1-b46d-4b04-bf6c-49f50b59a595-utilities\") pod \"certified-operators-p2z92\" (UID: \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\") " pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.639872 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a567dd1-b46d-4b04-bf6c-49f50b59a595-catalog-content\") pod \"certified-operators-p2z92\" (UID: \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\") " pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.673268 4822 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fvmt7\" (UniqueName: \"kubernetes.io/projected/4a567dd1-b46d-4b04-bf6c-49f50b59a595-kube-api-access-fvmt7\") pod \"certified-operators-p2z92\" (UID: \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\") " pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:14 crc kubenswrapper[4822]: I1201 08:07:14.873855 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:15 crc kubenswrapper[4822]: I1201 08:07:15.382115 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p2z92"] Dec 01 08:07:15 crc kubenswrapper[4822]: I1201 08:07:15.703690 4822 generic.go:334] "Generic (PLEG): container finished" podID="4a567dd1-b46d-4b04-bf6c-49f50b59a595" containerID="b883ec5af5b770ba306d78ba35ca8118c1b4dbb95064025fe68f31e4ceca7d40" exitCode=0 Dec 01 08:07:15 crc kubenswrapper[4822]: I1201 08:07:15.703811 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p2z92" event={"ID":"4a567dd1-b46d-4b04-bf6c-49f50b59a595","Type":"ContainerDied","Data":"b883ec5af5b770ba306d78ba35ca8118c1b4dbb95064025fe68f31e4ceca7d40"} Dec 01 08:07:15 crc kubenswrapper[4822]: I1201 08:07:15.704016 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p2z92" event={"ID":"4a567dd1-b46d-4b04-bf6c-49f50b59a595","Type":"ContainerStarted","Data":"e59e0ab34b2565d164c81b47897cfb62809e5f4a4a2a34811ae58b51e219472e"} Dec 01 08:07:15 crc kubenswrapper[4822]: I1201 08:07:15.706502 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 08:07:15 crc kubenswrapper[4822]: I1201 08:07:15.951851 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:07:15 crc kubenswrapper[4822]: E1201 08:07:15.952315 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:07:17 crc kubenswrapper[4822]: I1201 08:07:17.723030 4822 generic.go:334] "Generic (PLEG): container finished" podID="4a567dd1-b46d-4b04-bf6c-49f50b59a595" containerID="b3970d5f7d6c309a29496f8b1e4f8072f6f31e8eef9dc5ad20b38c432cb61797" exitCode=0 Dec 01 08:07:17 crc kubenswrapper[4822]: I1201 08:07:17.723243 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p2z92" event={"ID":"4a567dd1-b46d-4b04-bf6c-49f50b59a595","Type":"ContainerDied","Data":"b3970d5f7d6c309a29496f8b1e4f8072f6f31e8eef9dc5ad20b38c432cb61797"} Dec 01 08:07:19 crc kubenswrapper[4822]: I1201 08:07:19.741925 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p2z92" event={"ID":"4a567dd1-b46d-4b04-bf6c-49f50b59a595","Type":"ContainerStarted","Data":"852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4"} Dec 01 08:07:19 crc kubenswrapper[4822]: I1201 08:07:19.781761 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-p2z92" podStartSLOduration=2.754915817 
podStartE2EDuration="5.78172843s" podCreationTimestamp="2025-12-01 08:07:14 +0000 UTC" firstStartedPulling="2025-12-01 08:07:15.706185958 +0000 UTC m=+4591.026993644" lastFinishedPulling="2025-12-01 08:07:18.732998571 +0000 UTC m=+4594.053806257" observedRunningTime="2025-12-01 08:07:19.772334325 +0000 UTC m=+4595.093142051" watchObservedRunningTime="2025-12-01 08:07:19.78172843 +0000 UTC m=+4595.102536156" Dec 01 08:07:24 crc kubenswrapper[4822]: I1201 08:07:24.874250 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:24 crc kubenswrapper[4822]: I1201 08:07:24.874908 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:24 crc kubenswrapper[4822]: I1201 08:07:24.927327 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:25 crc kubenswrapper[4822]: I1201 08:07:25.885679 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:25 crc kubenswrapper[4822]: I1201 08:07:25.953074 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p2z92"] Dec 01 08:07:26 crc kubenswrapper[4822]: I1201 08:07:26.951131 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:07:26 crc kubenswrapper[4822]: E1201 08:07:26.951817 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:07:27 crc kubenswrapper[4822]: I1201 08:07:27.821636 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-p2z92" podUID="4a567dd1-b46d-4b04-bf6c-49f50b59a595" containerName="registry-server" containerID="cri-o://852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4" gracePeriod=2 Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.786911 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.833222 4822 generic.go:334] "Generic (PLEG): container finished" podID="4a567dd1-b46d-4b04-bf6c-49f50b59a595" containerID="852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4" exitCode=0 Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.833271 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p2z92" event={"ID":"4a567dd1-b46d-4b04-bf6c-49f50b59a595","Type":"ContainerDied","Data":"852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4"} Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.833299 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p2z92" event={"ID":"4a567dd1-b46d-4b04-bf6c-49f50b59a595","Type":"ContainerDied","Data":"e59e0ab34b2565d164c81b47897cfb62809e5f4a4a2a34811ae58b51e219472e"} Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.833316 4822 scope.go:117] "RemoveContainer" containerID="852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4" Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.833497 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p2z92" Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.852678 4822 scope.go:117] "RemoveContainer" containerID="b3970d5f7d6c309a29496f8b1e4f8072f6f31e8eef9dc5ad20b38c432cb61797" Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.872444 4822 scope.go:117] "RemoveContainer" containerID="b883ec5af5b770ba306d78ba35ca8118c1b4dbb95064025fe68f31e4ceca7d40" Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.902649 4822 scope.go:117] "RemoveContainer" containerID="852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4" Dec 01 08:07:28 crc kubenswrapper[4822]: E1201 08:07:28.903299 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4\": container with ID starting with 852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4 not found: ID does not exist" containerID="852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4" Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.903343 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4"} err="failed to get container status \"852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4\": rpc error: code = NotFound desc = could not find container \"852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4\": container with ID starting with 852681419d7d7c9c811f054ef85f9730d3a911207e3be47be7c5508dfcca31f4 not found: ID does not exist" Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.903370 4822 scope.go:117] "RemoveContainer" containerID="b3970d5f7d6c309a29496f8b1e4f8072f6f31e8eef9dc5ad20b38c432cb61797" Dec 01 08:07:28 crc kubenswrapper[4822]: E1201 08:07:28.903792 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3970d5f7d6c309a29496f8b1e4f8072f6f31e8eef9dc5ad20b38c432cb61797\": container with ID starting with b3970d5f7d6c309a29496f8b1e4f8072f6f31e8eef9dc5ad20b38c432cb61797 not found: ID does not exist" 
containerID="b3970d5f7d6c309a29496f8b1e4f8072f6f31e8eef9dc5ad20b38c432cb61797" Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.903851 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3970d5f7d6c309a29496f8b1e4f8072f6f31e8eef9dc5ad20b38c432cb61797"} err="failed to get container status \"b3970d5f7d6c309a29496f8b1e4f8072f6f31e8eef9dc5ad20b38c432cb61797\": rpc error: code = NotFound desc = could not find container \"b3970d5f7d6c309a29496f8b1e4f8072f6f31e8eef9dc5ad20b38c432cb61797\": container with ID starting with b3970d5f7d6c309a29496f8b1e4f8072f6f31e8eef9dc5ad20b38c432cb61797 not found: ID does not exist" Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.903872 4822 scope.go:117] "RemoveContainer" containerID="b883ec5af5b770ba306d78ba35ca8118c1b4dbb95064025fe68f31e4ceca7d40" Dec 01 08:07:28 crc kubenswrapper[4822]: E1201 08:07:28.904159 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b883ec5af5b770ba306d78ba35ca8118c1b4dbb95064025fe68f31e4ceca7d40\": container with ID starting with b883ec5af5b770ba306d78ba35ca8118c1b4dbb95064025fe68f31e4ceca7d40 not found: ID does not exist" containerID="b883ec5af5b770ba306d78ba35ca8118c1b4dbb95064025fe68f31e4ceca7d40" Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.904190 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b883ec5af5b770ba306d78ba35ca8118c1b4dbb95064025fe68f31e4ceca7d40"} err="failed to get container status \"b883ec5af5b770ba306d78ba35ca8118c1b4dbb95064025fe68f31e4ceca7d40\": rpc error: code = NotFound desc = could not find container \"b883ec5af5b770ba306d78ba35ca8118c1b4dbb95064025fe68f31e4ceca7d40\": container with ID starting with b883ec5af5b770ba306d78ba35ca8118c1b4dbb95064025fe68f31e4ceca7d40 not found: ID does not exist" Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.972671 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a567dd1-b46d-4b04-bf6c-49f50b59a595-utilities\") pod \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\" (UID: \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\") " Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.972758 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvmt7\" (UniqueName: \"kubernetes.io/projected/4a567dd1-b46d-4b04-bf6c-49f50b59a595-kube-api-access-fvmt7\") pod \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\" (UID: \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\") " Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.972793 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a567dd1-b46d-4b04-bf6c-49f50b59a595-catalog-content\") pod \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\" (UID: \"4a567dd1-b46d-4b04-bf6c-49f50b59a595\") " Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.973675 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a567dd1-b46d-4b04-bf6c-49f50b59a595-utilities" (OuterVolumeSpecName: "utilities") pod "4a567dd1-b46d-4b04-bf6c-49f50b59a595" (UID: "4a567dd1-b46d-4b04-bf6c-49f50b59a595"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:07:28 crc kubenswrapper[4822]: I1201 08:07:28.983100 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a567dd1-b46d-4b04-bf6c-49f50b59a595-kube-api-access-fvmt7" (OuterVolumeSpecName: "kube-api-access-fvmt7") pod "4a567dd1-b46d-4b04-bf6c-49f50b59a595" (UID: "4a567dd1-b46d-4b04-bf6c-49f50b59a595"). InnerVolumeSpecName "kube-api-access-fvmt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:07:29 crc kubenswrapper[4822]: I1201 08:07:29.032228 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a567dd1-b46d-4b04-bf6c-49f50b59a595-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a567dd1-b46d-4b04-bf6c-49f50b59a595" (UID: "4a567dd1-b46d-4b04-bf6c-49f50b59a595"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:07:29 crc kubenswrapper[4822]: I1201 08:07:29.076005 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a567dd1-b46d-4b04-bf6c-49f50b59a595-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 08:07:29 crc kubenswrapper[4822]: I1201 08:07:29.076446 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvmt7\" (UniqueName: \"kubernetes.io/projected/4a567dd1-b46d-4b04-bf6c-49f50b59a595-kube-api-access-fvmt7\") on node \"crc\" DevicePath \"\"" Dec 01 08:07:29 crc kubenswrapper[4822]: I1201 08:07:29.076544 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a567dd1-b46d-4b04-bf6c-49f50b59a595-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 08:07:29 crc kubenswrapper[4822]: I1201 08:07:29.173047 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p2z92"] Dec 01 08:07:29 crc kubenswrapper[4822]: I1201 08:07:29.187074 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-p2z92"] Dec 01 08:07:30 crc kubenswrapper[4822]: I1201 08:07:30.966096 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a567dd1-b46d-4b04-bf6c-49f50b59a595" path="/var/lib/kubelet/pods/4a567dd1-b46d-4b04-bf6c-49f50b59a595/volumes" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.059320 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jmq2n"] Dec 01 08:07:31 crc kubenswrapper[4822]: E1201 08:07:31.060054 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a567dd1-b46d-4b04-bf6c-49f50b59a595" containerName="extract-content" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.060103 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a567dd1-b46d-4b04-bf6c-49f50b59a595" containerName="extract-content" Dec 01 08:07:31 crc kubenswrapper[4822]: E1201 08:07:31.060149 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a567dd1-b46d-4b04-bf6c-49f50b59a595" containerName="registry-server" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.060166 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a567dd1-b46d-4b04-bf6c-49f50b59a595" containerName="registry-server" Dec 01 08:07:31 crc kubenswrapper[4822]: E1201 08:07:31.060203 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a567dd1-b46d-4b04-bf6c-49f50b59a595" containerName="extract-utilities" Dec 01 08:07:31 
crc kubenswrapper[4822]: I1201 08:07:31.060220 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a567dd1-b46d-4b04-bf6c-49f50b59a595" containerName="extract-utilities" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.060604 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a567dd1-b46d-4b04-bf6c-49f50b59a595" containerName="registry-server" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.062935 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.070576 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jmq2n"] Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.126961 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9pvw\" (UniqueName: \"kubernetes.io/projected/3a73b060-2595-4c1f-8543-8fb85a533f47-kube-api-access-r9pvw\") pod \"redhat-operators-jmq2n\" (UID: \"3a73b060-2595-4c1f-8543-8fb85a533f47\") " pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.127033 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a73b060-2595-4c1f-8543-8fb85a533f47-utilities\") pod \"redhat-operators-jmq2n\" (UID: \"3a73b060-2595-4c1f-8543-8fb85a533f47\") " pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.127065 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a73b060-2595-4c1f-8543-8fb85a533f47-catalog-content\") pod \"redhat-operators-jmq2n\" (UID: \"3a73b060-2595-4c1f-8543-8fb85a533f47\") " pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.228234 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9pvw\" (UniqueName: \"kubernetes.io/projected/3a73b060-2595-4c1f-8543-8fb85a533f47-kube-api-access-r9pvw\") pod \"redhat-operators-jmq2n\" (UID: \"3a73b060-2595-4c1f-8543-8fb85a533f47\") " pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.228293 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a73b060-2595-4c1f-8543-8fb85a533f47-utilities\") pod \"redhat-operators-jmq2n\" (UID: \"3a73b060-2595-4c1f-8543-8fb85a533f47\") " pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.228318 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a73b060-2595-4c1f-8543-8fb85a533f47-catalog-content\") pod \"redhat-operators-jmq2n\" (UID: \"3a73b060-2595-4c1f-8543-8fb85a533f47\") " pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.228793 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a73b060-2595-4c1f-8543-8fb85a533f47-catalog-content\") pod \"redhat-operators-jmq2n\" (UID: \"3a73b060-2595-4c1f-8543-8fb85a533f47\") " pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:31 crc 
kubenswrapper[4822]: I1201 08:07:31.228940 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a73b060-2595-4c1f-8543-8fb85a533f47-utilities\") pod \"redhat-operators-jmq2n\" (UID: \"3a73b060-2595-4c1f-8543-8fb85a533f47\") " pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.251502 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9pvw\" (UniqueName: \"kubernetes.io/projected/3a73b060-2595-4c1f-8543-8fb85a533f47-kube-api-access-r9pvw\") pod \"redhat-operators-jmq2n\" (UID: \"3a73b060-2595-4c1f-8543-8fb85a533f47\") " pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.410694 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:31 crc kubenswrapper[4822]: I1201 08:07:31.885779 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jmq2n"] Dec 01 08:07:32 crc kubenswrapper[4822]: I1201 08:07:32.872850 4822 generic.go:334] "Generic (PLEG): container finished" podID="3a73b060-2595-4c1f-8543-8fb85a533f47" containerID="446878e91aa9f31c877f2fd7fbe819fdaf273a562688c1e0efb5ebd29ca2d1a9" exitCode=0 Dec 01 08:07:32 crc kubenswrapper[4822]: I1201 08:07:32.873167 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmq2n" event={"ID":"3a73b060-2595-4c1f-8543-8fb85a533f47","Type":"ContainerDied","Data":"446878e91aa9f31c877f2fd7fbe819fdaf273a562688c1e0efb5ebd29ca2d1a9"} Dec 01 08:07:32 crc kubenswrapper[4822]: I1201 08:07:32.873251 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmq2n" event={"ID":"3a73b060-2595-4c1f-8543-8fb85a533f47","Type":"ContainerStarted","Data":"d32c81b03f276d1b401c0f56cd1c5551bb3ae45965ca4f23c030b2c3daeee2c3"} Dec 01 08:07:34 crc kubenswrapper[4822]: I1201 08:07:34.899267 4822 generic.go:334] "Generic (PLEG): container finished" podID="3a73b060-2595-4c1f-8543-8fb85a533f47" containerID="a96172b5d403472b3991e7413ce90f2a1089ab545281a992947db70b0da70759" exitCode=0 Dec 01 08:07:34 crc kubenswrapper[4822]: I1201 08:07:34.899638 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmq2n" event={"ID":"3a73b060-2595-4c1f-8543-8fb85a533f47","Type":"ContainerDied","Data":"a96172b5d403472b3991e7413ce90f2a1089ab545281a992947db70b0da70759"} Dec 01 08:07:35 crc kubenswrapper[4822]: I1201 08:07:35.914256 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmq2n" event={"ID":"3a73b060-2595-4c1f-8543-8fb85a533f47","Type":"ContainerStarted","Data":"91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58"} Dec 01 08:07:35 crc kubenswrapper[4822]: I1201 08:07:35.935808 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jmq2n" podStartSLOduration=2.364038094 podStartE2EDuration="4.935783861s" podCreationTimestamp="2025-12-01 08:07:31 +0000 UTC" firstStartedPulling="2025-12-01 08:07:32.875176583 +0000 UTC m=+4608.195984309" lastFinishedPulling="2025-12-01 08:07:35.44692235 +0000 UTC m=+4610.767730076" observedRunningTime="2025-12-01 08:07:35.933127406 +0000 UTC m=+4611.253935142" watchObservedRunningTime="2025-12-01 08:07:35.935783861 +0000 UTC m=+4611.256591547" Dec 01 08:07:40 crc 
kubenswrapper[4822]: I1201 08:07:40.950921 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:07:40 crc kubenswrapper[4822]: E1201 08:07:40.951541 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:07:41 crc kubenswrapper[4822]: I1201 08:07:41.411509 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:41 crc kubenswrapper[4822]: I1201 08:07:41.411871 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:42 crc kubenswrapper[4822]: I1201 08:07:42.481136 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jmq2n" podUID="3a73b060-2595-4c1f-8543-8fb85a533f47" containerName="registry-server" probeResult="failure" output=< Dec 01 08:07:42 crc kubenswrapper[4822]: timeout: failed to connect service ":50051" within 1s Dec 01 08:07:42 crc kubenswrapper[4822]: > Dec 01 08:07:43 crc kubenswrapper[4822]: I1201 08:07:43.760381 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5dvvd"] Dec 01 08:07:43 crc kubenswrapper[4822]: I1201 08:07:43.763994 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:43 crc kubenswrapper[4822]: I1201 08:07:43.769321 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5dvvd"] Dec 01 08:07:43 crc kubenswrapper[4822]: I1201 08:07:43.845718 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bm4hl\" (UniqueName: \"kubernetes.io/projected/08b00ada-980d-43f0-a950-22f75052eff4-kube-api-access-bm4hl\") pod \"redhat-marketplace-5dvvd\" (UID: \"08b00ada-980d-43f0-a950-22f75052eff4\") " pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:43 crc kubenswrapper[4822]: I1201 08:07:43.845792 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08b00ada-980d-43f0-a950-22f75052eff4-utilities\") pod \"redhat-marketplace-5dvvd\" (UID: \"08b00ada-980d-43f0-a950-22f75052eff4\") " pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:43 crc kubenswrapper[4822]: I1201 08:07:43.845969 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08b00ada-980d-43f0-a950-22f75052eff4-catalog-content\") pod \"redhat-marketplace-5dvvd\" (UID: \"08b00ada-980d-43f0-a950-22f75052eff4\") " pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:43 crc kubenswrapper[4822]: I1201 08:07:43.947758 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08b00ada-980d-43f0-a950-22f75052eff4-utilities\") pod \"redhat-marketplace-5dvvd\" (UID: \"08b00ada-980d-43f0-a950-22f75052eff4\") " 
pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:43 crc kubenswrapper[4822]: I1201 08:07:43.947870 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08b00ada-980d-43f0-a950-22f75052eff4-catalog-content\") pod \"redhat-marketplace-5dvvd\" (UID: \"08b00ada-980d-43f0-a950-22f75052eff4\") " pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:43 crc kubenswrapper[4822]: I1201 08:07:43.947970 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bm4hl\" (UniqueName: \"kubernetes.io/projected/08b00ada-980d-43f0-a950-22f75052eff4-kube-api-access-bm4hl\") pod \"redhat-marketplace-5dvvd\" (UID: \"08b00ada-980d-43f0-a950-22f75052eff4\") " pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:43 crc kubenswrapper[4822]: I1201 08:07:43.948658 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08b00ada-980d-43f0-a950-22f75052eff4-utilities\") pod \"redhat-marketplace-5dvvd\" (UID: \"08b00ada-980d-43f0-a950-22f75052eff4\") " pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:43 crc kubenswrapper[4822]: I1201 08:07:43.949275 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08b00ada-980d-43f0-a950-22f75052eff4-catalog-content\") pod \"redhat-marketplace-5dvvd\" (UID: \"08b00ada-980d-43f0-a950-22f75052eff4\") " pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:43 crc kubenswrapper[4822]: I1201 08:07:43.968977 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bm4hl\" (UniqueName: \"kubernetes.io/projected/08b00ada-980d-43f0-a950-22f75052eff4-kube-api-access-bm4hl\") pod \"redhat-marketplace-5dvvd\" (UID: \"08b00ada-980d-43f0-a950-22f75052eff4\") " pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:44 crc kubenswrapper[4822]: I1201 08:07:44.103076 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:44 crc kubenswrapper[4822]: W1201 08:07:44.565040 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08b00ada_980d_43f0_a950_22f75052eff4.slice/crio-5cd9347b8740a8c2372b66f36c54cc6e673557fa5ff56dbbc2166a2063dbfc2b WatchSource:0}: Error finding container 5cd9347b8740a8c2372b66f36c54cc6e673557fa5ff56dbbc2166a2063dbfc2b: Status 404 returned error can't find the container with id 5cd9347b8740a8c2372b66f36c54cc6e673557fa5ff56dbbc2166a2063dbfc2b Dec 01 08:07:44 crc kubenswrapper[4822]: I1201 08:07:44.570632 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5dvvd"] Dec 01 08:07:45 crc kubenswrapper[4822]: I1201 08:07:45.320170 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dvvd" event={"ID":"08b00ada-980d-43f0-a950-22f75052eff4","Type":"ContainerStarted","Data":"8e9ed2b0fa4a29f87a7643915badba6f2bf0a8eb821f7fd48d6270cad339849a"} Dec 01 08:07:45 crc kubenswrapper[4822]: I1201 08:07:45.320468 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dvvd" event={"ID":"08b00ada-980d-43f0-a950-22f75052eff4","Type":"ContainerStarted","Data":"5cd9347b8740a8c2372b66f36c54cc6e673557fa5ff56dbbc2166a2063dbfc2b"} Dec 01 08:07:47 crc kubenswrapper[4822]: I1201 08:07:47.346068 4822 generic.go:334] "Generic (PLEG): container finished" podID="08b00ada-980d-43f0-a950-22f75052eff4" containerID="8e9ed2b0fa4a29f87a7643915badba6f2bf0a8eb821f7fd48d6270cad339849a" exitCode=0 Dec 01 08:07:47 crc kubenswrapper[4822]: I1201 08:07:47.346168 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dvvd" event={"ID":"08b00ada-980d-43f0-a950-22f75052eff4","Type":"ContainerDied","Data":"8e9ed2b0fa4a29f87a7643915badba6f2bf0a8eb821f7fd48d6270cad339849a"} Dec 01 08:07:49 crc kubenswrapper[4822]: I1201 08:07:49.367793 4822 generic.go:334] "Generic (PLEG): container finished" podID="08b00ada-980d-43f0-a950-22f75052eff4" containerID="57f07071b17f31aaba523b4b96c60382c337a35d10e9716b20cdbfeffb56c5f1" exitCode=0 Dec 01 08:07:49 crc kubenswrapper[4822]: I1201 08:07:49.367869 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dvvd" event={"ID":"08b00ada-980d-43f0-a950-22f75052eff4","Type":"ContainerDied","Data":"57f07071b17f31aaba523b4b96c60382c337a35d10e9716b20cdbfeffb56c5f1"} Dec 01 08:07:51 crc kubenswrapper[4822]: I1201 08:07:51.390441 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dvvd" event={"ID":"08b00ada-980d-43f0-a950-22f75052eff4","Type":"ContainerStarted","Data":"f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09"} Dec 01 08:07:51 crc kubenswrapper[4822]: I1201 08:07:51.418955 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5dvvd" podStartSLOduration=5.851637895 podStartE2EDuration="8.418932116s" podCreationTimestamp="2025-12-01 08:07:43 +0000 UTC" firstStartedPulling="2025-12-01 08:07:47.350783542 +0000 UTC m=+4622.671591268" lastFinishedPulling="2025-12-01 08:07:49.918077793 +0000 UTC m=+4625.238885489" observedRunningTime="2025-12-01 08:07:51.416094905 +0000 UTC m=+4626.736902611" watchObservedRunningTime="2025-12-01 08:07:51.418932116 +0000 UTC m=+4626.739739822" Dec 01 
08:07:51 crc kubenswrapper[4822]: I1201 08:07:51.467374 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:51 crc kubenswrapper[4822]: I1201 08:07:51.520620 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:51 crc kubenswrapper[4822]: I1201 08:07:51.950407 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:07:52 crc kubenswrapper[4822]: I1201 08:07:52.119085 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jmq2n"] Dec 01 08:07:53 crc kubenswrapper[4822]: I1201 08:07:53.411908 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"1c67073cec33e28a0f6fef7f6096b7299ad7147babf07318be6bdf1c58b46d4b"} Dec 01 08:07:53 crc kubenswrapper[4822]: I1201 08:07:53.412146 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jmq2n" podUID="3a73b060-2595-4c1f-8543-8fb85a533f47" containerName="registry-server" containerID="cri-o://91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58" gracePeriod=2 Dec 01 08:07:53 crc kubenswrapper[4822]: I1201 08:07:53.784664 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:53 crc kubenswrapper[4822]: I1201 08:07:53.894572 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a73b060-2595-4c1f-8543-8fb85a533f47-catalog-content\") pod \"3a73b060-2595-4c1f-8543-8fb85a533f47\" (UID: \"3a73b060-2595-4c1f-8543-8fb85a533f47\") " Dec 01 08:07:53 crc kubenswrapper[4822]: I1201 08:07:53.895785 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9pvw\" (UniqueName: \"kubernetes.io/projected/3a73b060-2595-4c1f-8543-8fb85a533f47-kube-api-access-r9pvw\") pod \"3a73b060-2595-4c1f-8543-8fb85a533f47\" (UID: \"3a73b060-2595-4c1f-8543-8fb85a533f47\") " Dec 01 08:07:53 crc kubenswrapper[4822]: I1201 08:07:53.895904 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a73b060-2595-4c1f-8543-8fb85a533f47-utilities\") pod \"3a73b060-2595-4c1f-8543-8fb85a533f47\" (UID: \"3a73b060-2595-4c1f-8543-8fb85a533f47\") " Dec 01 08:07:53 crc kubenswrapper[4822]: I1201 08:07:53.896644 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a73b060-2595-4c1f-8543-8fb85a533f47-utilities" (OuterVolumeSpecName: "utilities") pod "3a73b060-2595-4c1f-8543-8fb85a533f47" (UID: "3a73b060-2595-4c1f-8543-8fb85a533f47"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:07:53 crc kubenswrapper[4822]: I1201 08:07:53.902354 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a73b060-2595-4c1f-8543-8fb85a533f47-kube-api-access-r9pvw" (OuterVolumeSpecName: "kube-api-access-r9pvw") pod "3a73b060-2595-4c1f-8543-8fb85a533f47" (UID: "3a73b060-2595-4c1f-8543-8fb85a533f47"). InnerVolumeSpecName "kube-api-access-r9pvw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:07:53 crc kubenswrapper[4822]: I1201 08:07:53.997355 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9pvw\" (UniqueName: \"kubernetes.io/projected/3a73b060-2595-4c1f-8543-8fb85a533f47-kube-api-access-r9pvw\") on node \"crc\" DevicePath \"\"" Dec 01 08:07:53 crc kubenswrapper[4822]: I1201 08:07:53.997385 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a73b060-2595-4c1f-8543-8fb85a533f47-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.011917 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a73b060-2595-4c1f-8543-8fb85a533f47-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3a73b060-2595-4c1f-8543-8fb85a533f47" (UID: "3a73b060-2595-4c1f-8543-8fb85a533f47"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.098948 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a73b060-2595-4c1f-8543-8fb85a533f47-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.104044 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.105050 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.153881 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.425960 4822 generic.go:334] "Generic (PLEG): container finished" podID="3a73b060-2595-4c1f-8543-8fb85a533f47" containerID="91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58" exitCode=0 Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.426692 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jmq2n" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.427028 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmq2n" event={"ID":"3a73b060-2595-4c1f-8543-8fb85a533f47","Type":"ContainerDied","Data":"91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58"} Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.427072 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmq2n" event={"ID":"3a73b060-2595-4c1f-8543-8fb85a533f47","Type":"ContainerDied","Data":"d32c81b03f276d1b401c0f56cd1c5551bb3ae45965ca4f23c030b2c3daeee2c3"} Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.427103 4822 scope.go:117] "RemoveContainer" containerID="91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.459682 4822 scope.go:117] "RemoveContainer" containerID="a96172b5d403472b3991e7413ce90f2a1089ab545281a992947db70b0da70759" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.471498 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jmq2n"] Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.482703 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jmq2n"] Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.491269 4822 scope.go:117] "RemoveContainer" containerID="446878e91aa9f31c877f2fd7fbe819fdaf273a562688c1e0efb5ebd29ca2d1a9" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.517494 4822 scope.go:117] "RemoveContainer" containerID="91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58" Dec 01 08:07:54 crc kubenswrapper[4822]: E1201 08:07:54.518076 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58\": container with ID starting with 91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58 not found: ID does not exist" containerID="91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.518133 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58"} err="failed to get container status \"91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58\": rpc error: code = NotFound desc = could not find container \"91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58\": container with ID starting with 91be958ccd6de13d6b44670dc2283fbeef6b0881dd43d9da7b8a4d721dc64b58 not found: ID does not exist" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.518169 4822 scope.go:117] "RemoveContainer" containerID="a96172b5d403472b3991e7413ce90f2a1089ab545281a992947db70b0da70759" Dec 01 08:07:54 crc kubenswrapper[4822]: E1201 08:07:54.518763 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a96172b5d403472b3991e7413ce90f2a1089ab545281a992947db70b0da70759\": container with ID starting with a96172b5d403472b3991e7413ce90f2a1089ab545281a992947db70b0da70759 not found: ID does not exist" containerID="a96172b5d403472b3991e7413ce90f2a1089ab545281a992947db70b0da70759" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.518794 4822 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a96172b5d403472b3991e7413ce90f2a1089ab545281a992947db70b0da70759"} err="failed to get container status \"a96172b5d403472b3991e7413ce90f2a1089ab545281a992947db70b0da70759\": rpc error: code = NotFound desc = could not find container \"a96172b5d403472b3991e7413ce90f2a1089ab545281a992947db70b0da70759\": container with ID starting with a96172b5d403472b3991e7413ce90f2a1089ab545281a992947db70b0da70759 not found: ID does not exist" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.518814 4822 scope.go:117] "RemoveContainer" containerID="446878e91aa9f31c877f2fd7fbe819fdaf273a562688c1e0efb5ebd29ca2d1a9" Dec 01 08:07:54 crc kubenswrapper[4822]: E1201 08:07:54.519394 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"446878e91aa9f31c877f2fd7fbe819fdaf273a562688c1e0efb5ebd29ca2d1a9\": container with ID starting with 446878e91aa9f31c877f2fd7fbe819fdaf273a562688c1e0efb5ebd29ca2d1a9 not found: ID does not exist" containerID="446878e91aa9f31c877f2fd7fbe819fdaf273a562688c1e0efb5ebd29ca2d1a9" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.519433 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"446878e91aa9f31c877f2fd7fbe819fdaf273a562688c1e0efb5ebd29ca2d1a9"} err="failed to get container status \"446878e91aa9f31c877f2fd7fbe819fdaf273a562688c1e0efb5ebd29ca2d1a9\": rpc error: code = NotFound desc = could not find container \"446878e91aa9f31c877f2fd7fbe819fdaf273a562688c1e0efb5ebd29ca2d1a9\": container with ID starting with 446878e91aa9f31c877f2fd7fbe819fdaf273a562688c1e0efb5ebd29ca2d1a9 not found: ID does not exist" Dec 01 08:07:54 crc kubenswrapper[4822]: I1201 08:07:54.968883 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a73b060-2595-4c1f-8543-8fb85a533f47" path="/var/lib/kubelet/pods/3a73b060-2595-4c1f-8543-8fb85a533f47/volumes" Dec 01 08:07:55 crc kubenswrapper[4822]: I1201 08:07:55.515225 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:56 crc kubenswrapper[4822]: I1201 08:07:56.521640 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5dvvd"] Dec 01 08:07:57 crc kubenswrapper[4822]: I1201 08:07:57.462139 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5dvvd" podUID="08b00ada-980d-43f0-a950-22f75052eff4" containerName="registry-server" containerID="cri-o://f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09" gracePeriod=2 Dec 01 08:07:57 crc kubenswrapper[4822]: I1201 08:07:57.900405 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.073972 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08b00ada-980d-43f0-a950-22f75052eff4-catalog-content\") pod \"08b00ada-980d-43f0-a950-22f75052eff4\" (UID: \"08b00ada-980d-43f0-a950-22f75052eff4\") " Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.074087 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bm4hl\" (UniqueName: \"kubernetes.io/projected/08b00ada-980d-43f0-a950-22f75052eff4-kube-api-access-bm4hl\") pod \"08b00ada-980d-43f0-a950-22f75052eff4\" (UID: \"08b00ada-980d-43f0-a950-22f75052eff4\") " Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.074141 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08b00ada-980d-43f0-a950-22f75052eff4-utilities\") pod \"08b00ada-980d-43f0-a950-22f75052eff4\" (UID: \"08b00ada-980d-43f0-a950-22f75052eff4\") " Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.076793 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08b00ada-980d-43f0-a950-22f75052eff4-utilities" (OuterVolumeSpecName: "utilities") pod "08b00ada-980d-43f0-a950-22f75052eff4" (UID: "08b00ada-980d-43f0-a950-22f75052eff4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.089278 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08b00ada-980d-43f0-a950-22f75052eff4-kube-api-access-bm4hl" (OuterVolumeSpecName: "kube-api-access-bm4hl") pod "08b00ada-980d-43f0-a950-22f75052eff4" (UID: "08b00ada-980d-43f0-a950-22f75052eff4"). InnerVolumeSpecName "kube-api-access-bm4hl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.113833 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08b00ada-980d-43f0-a950-22f75052eff4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08b00ada-980d-43f0-a950-22f75052eff4" (UID: "08b00ada-980d-43f0-a950-22f75052eff4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.176364 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bm4hl\" (UniqueName: \"kubernetes.io/projected/08b00ada-980d-43f0-a950-22f75052eff4-kube-api-access-bm4hl\") on node \"crc\" DevicePath \"\"" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.176418 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08b00ada-980d-43f0-a950-22f75052eff4-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.176436 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08b00ada-980d-43f0-a950-22f75052eff4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.475686 4822 generic.go:334] "Generic (PLEG): container finished" podID="08b00ada-980d-43f0-a950-22f75052eff4" containerID="f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09" exitCode=0 Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.475829 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5dvvd" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.475828 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dvvd" event={"ID":"08b00ada-980d-43f0-a950-22f75052eff4","Type":"ContainerDied","Data":"f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09"} Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.476309 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dvvd" event={"ID":"08b00ada-980d-43f0-a950-22f75052eff4","Type":"ContainerDied","Data":"5cd9347b8740a8c2372b66f36c54cc6e673557fa5ff56dbbc2166a2063dbfc2b"} Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.476345 4822 scope.go:117] "RemoveContainer" containerID="f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.525138 4822 scope.go:117] "RemoveContainer" containerID="57f07071b17f31aaba523b4b96c60382c337a35d10e9716b20cdbfeffb56c5f1" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.532531 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5dvvd"] Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.539729 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5dvvd"] Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.553827 4822 scope.go:117] "RemoveContainer" containerID="8e9ed2b0fa4a29f87a7643915badba6f2bf0a8eb821f7fd48d6270cad339849a" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.585674 4822 scope.go:117] "RemoveContainer" containerID="f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09" Dec 01 08:07:58 crc kubenswrapper[4822]: E1201 08:07:58.586173 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09\": container with ID starting with f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09 not found: ID does not exist" containerID="f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.586208 4822 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09"} err="failed to get container status \"f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09\": rpc error: code = NotFound desc = could not find container \"f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09\": container with ID starting with f03854170fea8dbb2686a2e939f173cc7b7e011e98f84e2b9e09afbcb188da09 not found: ID does not exist" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.586227 4822 scope.go:117] "RemoveContainer" containerID="57f07071b17f31aaba523b4b96c60382c337a35d10e9716b20cdbfeffb56c5f1" Dec 01 08:07:58 crc kubenswrapper[4822]: E1201 08:07:58.586598 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57f07071b17f31aaba523b4b96c60382c337a35d10e9716b20cdbfeffb56c5f1\": container with ID starting with 57f07071b17f31aaba523b4b96c60382c337a35d10e9716b20cdbfeffb56c5f1 not found: ID does not exist" containerID="57f07071b17f31aaba523b4b96c60382c337a35d10e9716b20cdbfeffb56c5f1" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.586640 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57f07071b17f31aaba523b4b96c60382c337a35d10e9716b20cdbfeffb56c5f1"} err="failed to get container status \"57f07071b17f31aaba523b4b96c60382c337a35d10e9716b20cdbfeffb56c5f1\": rpc error: code = NotFound desc = could not find container \"57f07071b17f31aaba523b4b96c60382c337a35d10e9716b20cdbfeffb56c5f1\": container with ID starting with 57f07071b17f31aaba523b4b96c60382c337a35d10e9716b20cdbfeffb56c5f1 not found: ID does not exist" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.586656 4822 scope.go:117] "RemoveContainer" containerID="8e9ed2b0fa4a29f87a7643915badba6f2bf0a8eb821f7fd48d6270cad339849a" Dec 01 08:07:58 crc kubenswrapper[4822]: E1201 08:07:58.586983 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e9ed2b0fa4a29f87a7643915badba6f2bf0a8eb821f7fd48d6270cad339849a\": container with ID starting with 8e9ed2b0fa4a29f87a7643915badba6f2bf0a8eb821f7fd48d6270cad339849a not found: ID does not exist" containerID="8e9ed2b0fa4a29f87a7643915badba6f2bf0a8eb821f7fd48d6270cad339849a" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.587009 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e9ed2b0fa4a29f87a7643915badba6f2bf0a8eb821f7fd48d6270cad339849a"} err="failed to get container status \"8e9ed2b0fa4a29f87a7643915badba6f2bf0a8eb821f7fd48d6270cad339849a\": rpc error: code = NotFound desc = could not find container \"8e9ed2b0fa4a29f87a7643915badba6f2bf0a8eb821f7fd48d6270cad339849a\": container with ID starting with 8e9ed2b0fa4a29f87a7643915badba6f2bf0a8eb821f7fd48d6270cad339849a not found: ID does not exist" Dec 01 08:07:58 crc kubenswrapper[4822]: I1201 08:07:58.966425 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08b00ada-980d-43f0-a950-22f75052eff4" path="/var/lib/kubelet/pods/08b00ada-980d-43f0-a950-22f75052eff4/volumes"
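
The RemoveContainer retries above all come back NotFound because cri-o had already dropped the containers when the pod sandbox went away; the kubelet logs the error and proceeds, which is the right outcome since a missing container is exactly what a delete wants. A minimal sketch of that idempotent-delete pattern, assuming a generic gRPC-backed runtime client (removeIfPresent and its remove callback are illustrative helpers, not kubelet source):

    package crisketch

    import (
    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // removeIfPresent treats gRPC NotFound from the runtime as success:
    // the container is already gone, which is the desired end state of a
    // delete. The remove callback stands in for a CRI RemoveContainer call.
    func removeIfPresent(containerID string, remove func(id string) error) error {
    	if err := remove(containerID); err != nil && status.Code(err) != codes.NotFound {
    		return err
    	}
    	return nil
    }
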
Dec 01 08:10:12 crc kubenswrapper[4822]: I1201 08:10:12.542848 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:10:12 crc kubenswrapper[4822]: I1201 08:10:12.546034 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:10:42 crc kubenswrapper[4822]: I1201 08:10:42.543410 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:10:42 crc kubenswrapper[4822]: I1201 08:10:42.544243 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:11:12 crc kubenswrapper[4822]: I1201 08:11:12.542948 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:11:12 crc kubenswrapper[4822]: I1201 08:11:12.543467 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:11:12 crc kubenswrapper[4822]: I1201 08:11:12.543509 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 08:11:12 crc kubenswrapper[4822]: I1201 08:11:12.544303 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1c67073cec33e28a0f6fef7f6096b7299ad7147babf07318be6bdf1c58b46d4b"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 08:11:12 crc kubenswrapper[4822]: I1201 08:11:12.544373 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://1c67073cec33e28a0f6fef7f6096b7299ad7147babf07318be6bdf1c58b46d4b" gracePeriod=600 Dec 01 08:11:12 crc kubenswrapper[4822]: I1201 08:11:12.969669 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="1c67073cec33e28a0f6fef7f6096b7299ad7147babf07318be6bdf1c58b46d4b" exitCode=0
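
The probe failures above arrive on a strict 30-second cadence (08:10:12, 08:10:42, 08:11:12), and the container is killed for restart right after the third consecutive failure, behavior consistent with an HTTP liveness probe against 127.0.0.1:8798/health with a 30s period and a failure threshold of 3. The machine-config-daemon's manifest is not part of this log, so those values are inferred; a minimal sketch of an equivalent probe in the k8s.io/api types:

    package probesketch

    import (
    	corev1 "k8s.io/api/core/v1"
    	"k8s.io/apimachinery/pkg/util/intstr"
    )

    // livenessProbe reconstructs the probe the prober entries above are
    // evidently running: GET http://127.0.0.1:8798/health every 30s.
    // PeriodSeconds is inferred from the log cadence and FailureThreshold
    // from the kill after the third failure; neither is read from the
    // daemon's actual manifest.
    func livenessProbe() *corev1.Probe {
    	return &corev1.Probe{
    		ProbeHandler: corev1.ProbeHandler{
    			HTTPGet: &corev1.HTTPGetAction{
    				Host: "127.0.0.1",
    				Path: "/health",
    				Port: intstr.FromInt(8798),
    			},
    		},
    		PeriodSeconds:    30,
    		FailureThreshold: 3,
    	}
    }
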
event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"1c67073cec33e28a0f6fef7f6096b7299ad7147babf07318be6bdf1c58b46d4b"} Dec 01 08:11:12 crc kubenswrapper[4822]: I1201 08:11:12.970054 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856"} Dec 01 08:11:12 crc kubenswrapper[4822]: I1201 08:11:12.970084 4822 scope.go:117] "RemoveContainer" containerID="08c48df5f1f52e3b918e8d99d1af5aa5f511000a1d1379cfc3afe5843d356912" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.467319 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bprw7"] Dec 01 08:11:31 crc kubenswrapper[4822]: E1201 08:11:31.468985 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a73b060-2595-4c1f-8543-8fb85a533f47" containerName="extract-content" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.469043 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a73b060-2595-4c1f-8543-8fb85a533f47" containerName="extract-content" Dec 01 08:11:31 crc kubenswrapper[4822]: E1201 08:11:31.469108 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08b00ada-980d-43f0-a950-22f75052eff4" containerName="registry-server" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.469129 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="08b00ada-980d-43f0-a950-22f75052eff4" containerName="registry-server" Dec 01 08:11:31 crc kubenswrapper[4822]: E1201 08:11:31.469167 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08b00ada-980d-43f0-a950-22f75052eff4" containerName="extract-utilities" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.469178 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="08b00ada-980d-43f0-a950-22f75052eff4" containerName="extract-utilities" Dec 01 08:11:31 crc kubenswrapper[4822]: E1201 08:11:31.469202 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a73b060-2595-4c1f-8543-8fb85a533f47" containerName="registry-server" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.469212 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a73b060-2595-4c1f-8543-8fb85a533f47" containerName="registry-server" Dec 01 08:11:31 crc kubenswrapper[4822]: E1201 08:11:31.469234 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08b00ada-980d-43f0-a950-22f75052eff4" containerName="extract-content" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.469243 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="08b00ada-980d-43f0-a950-22f75052eff4" containerName="extract-content" Dec 01 08:11:31 crc kubenswrapper[4822]: E1201 08:11:31.469261 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a73b060-2595-4c1f-8543-8fb85a533f47" containerName="extract-utilities" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.469270 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a73b060-2595-4c1f-8543-8fb85a533f47" containerName="extract-utilities" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.469537 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a73b060-2595-4c1f-8543-8fb85a533f47" containerName="registry-server" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.469591 4822 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="08b00ada-980d-43f0-a950-22f75052eff4" containerName="registry-server" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.471183 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.473900 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-utilities\") pod \"community-operators-bprw7\" (UID: \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\") " pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.474061 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtjlp\" (UniqueName: \"kubernetes.io/projected/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-kube-api-access-mtjlp\") pod \"community-operators-bprw7\" (UID: \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\") " pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.474221 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-catalog-content\") pod \"community-operators-bprw7\" (UID: \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\") " pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.490625 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bprw7"] Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.575172 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-catalog-content\") pod \"community-operators-bprw7\" (UID: \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\") " pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.575273 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-utilities\") pod \"community-operators-bprw7\" (UID: \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\") " pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.575320 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtjlp\" (UniqueName: \"kubernetes.io/projected/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-kube-api-access-mtjlp\") pod \"community-operators-bprw7\" (UID: \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\") " pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.575630 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-catalog-content\") pod \"community-operators-bprw7\" (UID: \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\") " pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.576289 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-utilities\") pod \"community-operators-bprw7\" (UID: 
\"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\") " pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.603566 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtjlp\" (UniqueName: \"kubernetes.io/projected/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-kube-api-access-mtjlp\") pod \"community-operators-bprw7\" (UID: \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\") " pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:31 crc kubenswrapper[4822]: I1201 08:11:31.804511 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:32 crc kubenswrapper[4822]: I1201 08:11:32.148538 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bprw7"] Dec 01 08:11:32 crc kubenswrapper[4822]: I1201 08:11:32.211721 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bprw7" event={"ID":"19f80fb8-8f1f-44f7-8b15-9ce99f84074a","Type":"ContainerStarted","Data":"ac218ef7dd22afb65a97b83f8ffd003ef728dbd50fb1d992b934a0c552e2e997"} Dec 01 08:11:33 crc kubenswrapper[4822]: I1201 08:11:33.221443 4822 generic.go:334] "Generic (PLEG): container finished" podID="19f80fb8-8f1f-44f7-8b15-9ce99f84074a" containerID="98e4b19659e7745caed85337dd5640b43dd9c71cadf0295e37786f9ee01d3976" exitCode=0 Dec 01 08:11:33 crc kubenswrapper[4822]: I1201 08:11:33.221651 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bprw7" event={"ID":"19f80fb8-8f1f-44f7-8b15-9ce99f84074a","Type":"ContainerDied","Data":"98e4b19659e7745caed85337dd5640b43dd9c71cadf0295e37786f9ee01d3976"} Dec 01 08:11:35 crc kubenswrapper[4822]: I1201 08:11:35.245311 4822 generic.go:334] "Generic (PLEG): container finished" podID="19f80fb8-8f1f-44f7-8b15-9ce99f84074a" containerID="e55cb66329935746082a9da6fb00e7178bb86f2606004d96780bc90812475070" exitCode=0 Dec 01 08:11:35 crc kubenswrapper[4822]: I1201 08:11:35.245607 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bprw7" event={"ID":"19f80fb8-8f1f-44f7-8b15-9ce99f84074a","Type":"ContainerDied","Data":"e55cb66329935746082a9da6fb00e7178bb86f2606004d96780bc90812475070"} Dec 01 08:11:36 crc kubenswrapper[4822]: I1201 08:11:36.257296 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bprw7" event={"ID":"19f80fb8-8f1f-44f7-8b15-9ce99f84074a","Type":"ContainerStarted","Data":"a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2"} Dec 01 08:11:36 crc kubenswrapper[4822]: I1201 08:11:36.285219 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bprw7" podStartSLOduration=2.590636684 podStartE2EDuration="5.28519851s" podCreationTimestamp="2025-12-01 08:11:31 +0000 UTC" firstStartedPulling="2025-12-01 08:11:33.223753635 +0000 UTC m=+4848.544561341" lastFinishedPulling="2025-12-01 08:11:35.918315451 +0000 UTC m=+4851.239123167" observedRunningTime="2025-12-01 08:11:36.282318187 +0000 UTC m=+4851.603125883" watchObservedRunningTime="2025-12-01 08:11:36.28519851 +0000 UTC m=+4851.606006196" Dec 01 08:11:41 crc kubenswrapper[4822]: I1201 08:11:41.805761 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:41 crc kubenswrapper[4822]: I1201 08:11:41.806149 4822 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:41 crc kubenswrapper[4822]: I1201 08:11:41.882944 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:42 crc kubenswrapper[4822]: I1201 08:11:42.370009 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:42 crc kubenswrapper[4822]: I1201 08:11:42.430179 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bprw7"] Dec 01 08:11:44 crc kubenswrapper[4822]: I1201 08:11:44.331897 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bprw7" podUID="19f80fb8-8f1f-44f7-8b15-9ce99f84074a" containerName="registry-server" containerID="cri-o://a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2" gracePeriod=2 Dec 01 08:11:44 crc kubenswrapper[4822]: I1201 08:11:44.831430 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:44 crc kubenswrapper[4822]: I1201 08:11:44.887348 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-utilities\") pod \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\" (UID: \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\") " Dec 01 08:11:44 crc kubenswrapper[4822]: I1201 08:11:44.887472 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-catalog-content\") pod \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\" (UID: \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\") " Dec 01 08:11:44 crc kubenswrapper[4822]: I1201 08:11:44.887620 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtjlp\" (UniqueName: \"kubernetes.io/projected/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-kube-api-access-mtjlp\") pod \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\" (UID: \"19f80fb8-8f1f-44f7-8b15-9ce99f84074a\") " Dec 01 08:11:44 crc kubenswrapper[4822]: I1201 08:11:44.894165 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-kube-api-access-mtjlp" (OuterVolumeSpecName: "kube-api-access-mtjlp") pod "19f80fb8-8f1f-44f7-8b15-9ce99f84074a" (UID: "19f80fb8-8f1f-44f7-8b15-9ce99f84074a"). InnerVolumeSpecName "kube-api-access-mtjlp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:11:44 crc kubenswrapper[4822]: I1201 08:11:44.911727 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-utilities" (OuterVolumeSpecName: "utilities") pod "19f80fb8-8f1f-44f7-8b15-9ce99f84074a" (UID: "19f80fb8-8f1f-44f7-8b15-9ce99f84074a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:11:44 crc kubenswrapper[4822]: I1201 08:11:44.989338 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtjlp\" (UniqueName: \"kubernetes.io/projected/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-kube-api-access-mtjlp\") on node \"crc\" DevicePath \"\"" Dec 01 08:11:44 crc kubenswrapper[4822]: I1201 08:11:44.989371 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.015026 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "19f80fb8-8f1f-44f7-8b15-9ce99f84074a" (UID: "19f80fb8-8f1f-44f7-8b15-9ce99f84074a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.089905 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19f80fb8-8f1f-44f7-8b15-9ce99f84074a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.345109 4822 generic.go:334] "Generic (PLEG): container finished" podID="19f80fb8-8f1f-44f7-8b15-9ce99f84074a" containerID="a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2" exitCode=0 Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.345176 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bprw7" event={"ID":"19f80fb8-8f1f-44f7-8b15-9ce99f84074a","Type":"ContainerDied","Data":"a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2"} Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.345230 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bprw7" event={"ID":"19f80fb8-8f1f-44f7-8b15-9ce99f84074a","Type":"ContainerDied","Data":"ac218ef7dd22afb65a97b83f8ffd003ef728dbd50fb1d992b934a0c552e2e997"} Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.345262 4822 scope.go:117] "RemoveContainer" containerID="a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2" Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.345259 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bprw7" Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.380494 4822 scope.go:117] "RemoveContainer" containerID="e55cb66329935746082a9da6fb00e7178bb86f2606004d96780bc90812475070" Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.414392 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bprw7"] Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.426830 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bprw7"] Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.435410 4822 scope.go:117] "RemoveContainer" containerID="98e4b19659e7745caed85337dd5640b43dd9c71cadf0295e37786f9ee01d3976" Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.463849 4822 scope.go:117] "RemoveContainer" containerID="a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2" Dec 01 08:11:45 crc kubenswrapper[4822]: E1201 08:11:45.464454 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2\": container with ID starting with a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2 not found: ID does not exist" containerID="a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2" Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.464509 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2"} err="failed to get container status \"a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2\": rpc error: code = NotFound desc = could not find container \"a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2\": container with ID starting with a44c9ff2c7486dcf40f4824855e65e6e6d644347d8ab8c054128a1febbd6cad2 not found: ID does not exist" Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.464539 4822 scope.go:117] "RemoveContainer" containerID="e55cb66329935746082a9da6fb00e7178bb86f2606004d96780bc90812475070" Dec 01 08:11:45 crc kubenswrapper[4822]: E1201 08:11:45.464977 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e55cb66329935746082a9da6fb00e7178bb86f2606004d96780bc90812475070\": container with ID starting with e55cb66329935746082a9da6fb00e7178bb86f2606004d96780bc90812475070 not found: ID does not exist" containerID="e55cb66329935746082a9da6fb00e7178bb86f2606004d96780bc90812475070" Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.465005 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e55cb66329935746082a9da6fb00e7178bb86f2606004d96780bc90812475070"} err="failed to get container status \"e55cb66329935746082a9da6fb00e7178bb86f2606004d96780bc90812475070\": rpc error: code = NotFound desc = could not find container \"e55cb66329935746082a9da6fb00e7178bb86f2606004d96780bc90812475070\": container with ID starting with e55cb66329935746082a9da6fb00e7178bb86f2606004d96780bc90812475070 not found: ID does not exist" Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.465024 4822 scope.go:117] "RemoveContainer" containerID="98e4b19659e7745caed85337dd5640b43dd9c71cadf0295e37786f9ee01d3976" Dec 01 08:11:45 crc kubenswrapper[4822]: E1201 08:11:45.465457 4822 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"98e4b19659e7745caed85337dd5640b43dd9c71cadf0295e37786f9ee01d3976\": container with ID starting with 98e4b19659e7745caed85337dd5640b43dd9c71cadf0295e37786f9ee01d3976 not found: ID does not exist" containerID="98e4b19659e7745caed85337dd5640b43dd9c71cadf0295e37786f9ee01d3976" Dec 01 08:11:45 crc kubenswrapper[4822]: I1201 08:11:45.465490 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98e4b19659e7745caed85337dd5640b43dd9c71cadf0295e37786f9ee01d3976"} err="failed to get container status \"98e4b19659e7745caed85337dd5640b43dd9c71cadf0295e37786f9ee01d3976\": rpc error: code = NotFound desc = could not find container \"98e4b19659e7745caed85337dd5640b43dd9c71cadf0295e37786f9ee01d3976\": container with ID starting with 98e4b19659e7745caed85337dd5640b43dd9c71cadf0295e37786f9ee01d3976 not found: ID does not exist" Dec 01 08:11:46 crc kubenswrapper[4822]: I1201 08:11:46.967969 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19f80fb8-8f1f-44f7-8b15-9ce99f84074a" path="/var/lib/kubelet/pods/19f80fb8-8f1f-44f7-8b15-9ce99f84074a/volumes" Dec 01 08:13:12 crc kubenswrapper[4822]: I1201 08:13:12.543356 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:13:12 crc kubenswrapper[4822]: I1201 08:13:12.544178 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:13:42 crc kubenswrapper[4822]: I1201 08:13:42.542957 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:13:42 crc kubenswrapper[4822]: I1201 08:13:42.543693 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:14:12 crc kubenswrapper[4822]: I1201 08:14:12.543319 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:14:12 crc kubenswrapper[4822]: I1201 08:14:12.543876 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:14:12 crc kubenswrapper[4822]: I1201 08:14:12.543941 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 08:14:12 crc kubenswrapper[4822]: I1201 08:14:12.544628 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 08:14:12 crc kubenswrapper[4822]: I1201 08:14:12.544717 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" gracePeriod=600 Dec 01 08:14:13 crc kubenswrapper[4822]: E1201 08:14:13.641893 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:14:13 crc kubenswrapper[4822]: I1201 08:14:13.714523 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" exitCode=0 Dec 01 08:14:13 crc kubenswrapper[4822]: I1201 08:14:13.714577 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856"} Dec 01 08:14:13 crc kubenswrapper[4822]: I1201 08:14:13.714630 4822 scope.go:117] "RemoveContainer" containerID="1c67073cec33e28a0f6fef7f6096b7299ad7147babf07318be6bdf1c58b46d4b" Dec 01 08:14:13 crc kubenswrapper[4822]: I1201 08:14:13.715171 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:14:13 crc kubenswrapper[4822]: E1201 08:14:13.715387 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:14:26 crc kubenswrapper[4822]: I1201 08:14:26.951010 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:14:26 crc kubenswrapper[4822]: E1201 08:14:26.952045 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:14:40 crc 
Dec 01 08:14:40 crc kubenswrapper[4822]: I1201 08:14:40.951301 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:14:40 crc kubenswrapper[4822]: E1201 08:14:40.952780 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:14:52 crc kubenswrapper[4822]: I1201 08:14:52.951141 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:14:52 crc kubenswrapper[4822]: E1201 08:14:52.952071 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.163780 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7"] Dec 01 08:15:00 crc kubenswrapper[4822]: E1201 08:15:00.164735 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19f80fb8-8f1f-44f7-8b15-9ce99f84074a" containerName="extract-content" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.164760 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="19f80fb8-8f1f-44f7-8b15-9ce99f84074a" containerName="extract-content" Dec 01 08:15:00 crc kubenswrapper[4822]: E1201 08:15:00.164784 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19f80fb8-8f1f-44f7-8b15-9ce99f84074a" containerName="registry-server" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.164792 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="19f80fb8-8f1f-44f7-8b15-9ce99f84074a" containerName="registry-server" Dec 01 08:15:00 crc kubenswrapper[4822]: E1201 08:15:00.164805 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19f80fb8-8f1f-44f7-8b15-9ce99f84074a" containerName="extract-utilities" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.164811 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="19f80fb8-8f1f-44f7-8b15-9ce99f84074a" containerName="extract-utilities" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.165043 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="19f80fb8-8f1f-44f7-8b15-9ce99f84074a" containerName="registry-server" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.165680 4822 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.168513 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.168592 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.182461 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7"] Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.249998 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f261545-c3c7-40df-86a7-62d3a44f0a9a-secret-volume\") pod \"collect-profiles-29409615-nmxv7\" (UID: \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.250072 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f261545-c3c7-40df-86a7-62d3a44f0a9a-config-volume\") pod \"collect-profiles-29409615-nmxv7\" (UID: \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.250107 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drnnj\" (UniqueName: \"kubernetes.io/projected/4f261545-c3c7-40df-86a7-62d3a44f0a9a-kube-api-access-drnnj\") pod \"collect-profiles-29409615-nmxv7\" (UID: \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.351975 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f261545-c3c7-40df-86a7-62d3a44f0a9a-secret-volume\") pod \"collect-profiles-29409615-nmxv7\" (UID: \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.352038 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f261545-c3c7-40df-86a7-62d3a44f0a9a-config-volume\") pod \"collect-profiles-29409615-nmxv7\" (UID: \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.352062 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drnnj\" (UniqueName: \"kubernetes.io/projected/4f261545-c3c7-40df-86a7-62d3a44f0a9a-kube-api-access-drnnj\") pod \"collect-profiles-29409615-nmxv7\" (UID: \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.353351 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f261545-c3c7-40df-86a7-62d3a44f0a9a-config-volume\") pod 
\"collect-profiles-29409615-nmxv7\" (UID: \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.364399 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f261545-c3c7-40df-86a7-62d3a44f0a9a-secret-volume\") pod \"collect-profiles-29409615-nmxv7\" (UID: \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.372496 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drnnj\" (UniqueName: \"kubernetes.io/projected/4f261545-c3c7-40df-86a7-62d3a44f0a9a-kube-api-access-drnnj\") pod \"collect-profiles-29409615-nmxv7\" (UID: \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.490568 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:00 crc kubenswrapper[4822]: I1201 08:15:00.727155 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7"] Dec 01 08:15:01 crc kubenswrapper[4822]: I1201 08:15:01.156429 4822 generic.go:334] "Generic (PLEG): container finished" podID="4f261545-c3c7-40df-86a7-62d3a44f0a9a" containerID="c0ca721179025f4b7fff7661735173ae5a45d7120b360045d062580e1f48d937" exitCode=0 Dec 01 08:15:01 crc kubenswrapper[4822]: I1201 08:15:01.156482 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" event={"ID":"4f261545-c3c7-40df-86a7-62d3a44f0a9a","Type":"ContainerDied","Data":"c0ca721179025f4b7fff7661735173ae5a45d7120b360045d062580e1f48d937"} Dec 01 08:15:01 crc kubenswrapper[4822]: I1201 08:15:01.156518 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" event={"ID":"4f261545-c3c7-40df-86a7-62d3a44f0a9a","Type":"ContainerStarted","Data":"33e6bfac9d7f2e10f56c726fcec6de04f071fb6b32bfddf2b4fe8134d101ed5e"} Dec 01 08:15:02 crc kubenswrapper[4822]: I1201 08:15:02.608339 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:02 crc kubenswrapper[4822]: I1201 08:15:02.683538 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drnnj\" (UniqueName: \"kubernetes.io/projected/4f261545-c3c7-40df-86a7-62d3a44f0a9a-kube-api-access-drnnj\") pod \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\" (UID: \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\") " Dec 01 08:15:02 crc kubenswrapper[4822]: I1201 08:15:02.683706 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f261545-c3c7-40df-86a7-62d3a44f0a9a-secret-volume\") pod \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\" (UID: \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\") " Dec 01 08:15:02 crc kubenswrapper[4822]: I1201 08:15:02.683733 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f261545-c3c7-40df-86a7-62d3a44f0a9a-config-volume\") pod \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\" (UID: \"4f261545-c3c7-40df-86a7-62d3a44f0a9a\") " Dec 01 08:15:02 crc kubenswrapper[4822]: I1201 08:15:02.684590 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f261545-c3c7-40df-86a7-62d3a44f0a9a-config-volume" (OuterVolumeSpecName: "config-volume") pod "4f261545-c3c7-40df-86a7-62d3a44f0a9a" (UID: "4f261545-c3c7-40df-86a7-62d3a44f0a9a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 08:15:02 crc kubenswrapper[4822]: I1201 08:15:02.684800 4822 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f261545-c3c7-40df-86a7-62d3a44f0a9a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 08:15:02 crc kubenswrapper[4822]: I1201 08:15:02.689947 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f261545-c3c7-40df-86a7-62d3a44f0a9a-kube-api-access-drnnj" (OuterVolumeSpecName: "kube-api-access-drnnj") pod "4f261545-c3c7-40df-86a7-62d3a44f0a9a" (UID: "4f261545-c3c7-40df-86a7-62d3a44f0a9a"). InnerVolumeSpecName "kube-api-access-drnnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:15:02 crc kubenswrapper[4822]: I1201 08:15:02.691242 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f261545-c3c7-40df-86a7-62d3a44f0a9a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4f261545-c3c7-40df-86a7-62d3a44f0a9a" (UID: "4f261545-c3c7-40df-86a7-62d3a44f0a9a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 08:15:02 crc kubenswrapper[4822]: I1201 08:15:02.785669 4822 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f261545-c3c7-40df-86a7-62d3a44f0a9a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 08:15:02 crc kubenswrapper[4822]: I1201 08:15:02.785705 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drnnj\" (UniqueName: \"kubernetes.io/projected/4f261545-c3c7-40df-86a7-62d3a44f0a9a-kube-api-access-drnnj\") on node \"crc\" DevicePath \"\"" Dec 01 08:15:03 crc kubenswrapper[4822]: I1201 08:15:03.174830 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" event={"ID":"4f261545-c3c7-40df-86a7-62d3a44f0a9a","Type":"ContainerDied","Data":"33e6bfac9d7f2e10f56c726fcec6de04f071fb6b32bfddf2b4fe8134d101ed5e"} Dec 01 08:15:03 crc kubenswrapper[4822]: I1201 08:15:03.174872 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33e6bfac9d7f2e10f56c726fcec6de04f071fb6b32bfddf2b4fe8134d101ed5e" Dec 01 08:15:03 crc kubenswrapper[4822]: I1201 08:15:03.175263 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409615-nmxv7" Dec 01 08:15:03 crc kubenswrapper[4822]: I1201 08:15:03.721471 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb"] Dec 01 08:15:03 crc kubenswrapper[4822]: I1201 08:15:03.732741 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409570-dcbjb"] Dec 01 08:15:04 crc kubenswrapper[4822]: I1201 08:15:04.959024 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:15:04 crc kubenswrapper[4822]: E1201 08:15:04.959912 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:15:04 crc kubenswrapper[4822]: I1201 08:15:04.965623 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce" path="/var/lib/kubelet/pods/d4ceca39-ee9e-4c6f-8d01-a4fecae0c9ce/volumes" Dec 01 08:15:16 crc kubenswrapper[4822]: I1201 08:15:16.951537 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:15:16 crc kubenswrapper[4822]: E1201 08:15:16.952696 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:15:27 crc kubenswrapper[4822]: I1201 08:15:27.950902 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:15:27 
crc kubenswrapper[4822]: E1201 08:15:27.951713 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:15:38 crc kubenswrapper[4822]: I1201 08:15:38.951420 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:15:38 crc kubenswrapper[4822]: E1201 08:15:38.952747 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:15:49 crc kubenswrapper[4822]: I1201 08:15:49.951545 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:15:49 crc kubenswrapper[4822]: E1201 08:15:49.952741 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:15:51 crc kubenswrapper[4822]: I1201 08:15:51.585977 4822 scope.go:117] "RemoveContainer" containerID="94f016207beed4324ad054f7727fc37b96ba33888fa8a62008a36c251df8211b" Dec 01 08:16:02 crc kubenswrapper[4822]: I1201 08:16:02.951373 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:16:02 crc kubenswrapper[4822]: E1201 08:16:02.952241 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:16:15 crc kubenswrapper[4822]: I1201 08:16:15.951436 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:16:15 crc kubenswrapper[4822]: E1201 08:16:15.952681 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:16:27 crc kubenswrapper[4822]: I1201 08:16:27.951592 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:16:27 crc 
kubenswrapper[4822]: E1201 08:16:27.952231 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:16:41 crc kubenswrapper[4822]: I1201 08:16:41.951578 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:16:41 crc kubenswrapper[4822]: E1201 08:16:41.952805 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:16:54 crc kubenswrapper[4822]: I1201 08:16:54.961715 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:16:54 crc kubenswrapper[4822]: E1201 08:16:54.962922 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:17:06 crc kubenswrapper[4822]: I1201 08:17:06.952130 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:17:06 crc kubenswrapper[4822]: E1201 08:17:06.953247 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:17:17 crc kubenswrapper[4822]: I1201 08:17:17.950445 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:17:17 crc kubenswrapper[4822]: E1201 08:17:17.951330 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:17:28 crc kubenswrapper[4822]: I1201 08:17:28.950780 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:17:28 crc kubenswrapper[4822]: E1201 08:17:28.952343 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:17:42 crc kubenswrapper[4822]: I1201 08:17:42.951061 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:17:42 crc kubenswrapper[4822]: E1201 08:17:42.951776 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:17:55 crc kubenswrapper[4822]: I1201 08:17:55.951674 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:17:55 crc kubenswrapper[4822]: E1201 08:17:55.952771 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:18:06 crc kubenswrapper[4822]: I1201 08:18:06.950346 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:18:06 crc kubenswrapper[4822]: E1201 08:18:06.951179 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:18:20 crc kubenswrapper[4822]: I1201 08:18:20.951429 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:18:20 crc kubenswrapper[4822]: E1201 08:18:20.952393 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.649226 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qbrdk"] Dec 01 08:18:26 crc kubenswrapper[4822]: E1201 08:18:26.650168 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f261545-c3c7-40df-86a7-62d3a44f0a9a" containerName="collect-profiles" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.650189 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f261545-c3c7-40df-86a7-62d3a44f0a9a" containerName="collect-profiles" Dec 01 
08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.650422 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f261545-c3c7-40df-86a7-62d3a44f0a9a" containerName="collect-profiles" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.651659 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.727407 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qbrdk"] Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.801389 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64a00882-0d43-4f6c-8824-5da71e0c3c5b-utilities\") pod \"redhat-marketplace-qbrdk\" (UID: \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\") " pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.801454 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h47v5\" (UniqueName: \"kubernetes.io/projected/64a00882-0d43-4f6c-8824-5da71e0c3c5b-kube-api-access-h47v5\") pod \"redhat-marketplace-qbrdk\" (UID: \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\") " pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.801886 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64a00882-0d43-4f6c-8824-5da71e0c3c5b-catalog-content\") pod \"redhat-marketplace-qbrdk\" (UID: \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\") " pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.903525 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h47v5\" (UniqueName: \"kubernetes.io/projected/64a00882-0d43-4f6c-8824-5da71e0c3c5b-kube-api-access-h47v5\") pod \"redhat-marketplace-qbrdk\" (UID: \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\") " pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.903636 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64a00882-0d43-4f6c-8824-5da71e0c3c5b-catalog-content\") pod \"redhat-marketplace-qbrdk\" (UID: \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\") " pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.903792 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64a00882-0d43-4f6c-8824-5da71e0c3c5b-utilities\") pod \"redhat-marketplace-qbrdk\" (UID: \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\") " pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.904298 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64a00882-0d43-4f6c-8824-5da71e0c3c5b-catalog-content\") pod \"redhat-marketplace-qbrdk\" (UID: \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\") " pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.904345 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/64a00882-0d43-4f6c-8824-5da71e0c3c5b-utilities\") pod \"redhat-marketplace-qbrdk\" (UID: \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\") " pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.924676 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h47v5\" (UniqueName: \"kubernetes.io/projected/64a00882-0d43-4f6c-8824-5da71e0c3c5b-kube-api-access-h47v5\") pod \"redhat-marketplace-qbrdk\" (UID: \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\") " pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:26 crc kubenswrapper[4822]: I1201 08:18:26.977869 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:27 crc kubenswrapper[4822]: I1201 08:18:27.476766 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qbrdk"] Dec 01 08:18:28 crc kubenswrapper[4822]: I1201 08:18:28.148646 4822 generic.go:334] "Generic (PLEG): container finished" podID="64a00882-0d43-4f6c-8824-5da71e0c3c5b" containerID="646ed85a792750ae768f85a6a441b0fd701c307df28d1ea7536a605a2bb0e220" exitCode=0 Dec 01 08:18:28 crc kubenswrapper[4822]: I1201 08:18:28.148750 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbrdk" event={"ID":"64a00882-0d43-4f6c-8824-5da71e0c3c5b","Type":"ContainerDied","Data":"646ed85a792750ae768f85a6a441b0fd701c307df28d1ea7536a605a2bb0e220"} Dec 01 08:18:28 crc kubenswrapper[4822]: I1201 08:18:28.148960 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbrdk" event={"ID":"64a00882-0d43-4f6c-8824-5da71e0c3c5b","Type":"ContainerStarted","Data":"057f4d9e1f225d09d247053a6f196622a1ee18ccc9874931afbe6aa7913dbef5"} Dec 01 08:18:28 crc kubenswrapper[4822]: I1201 08:18:28.151362 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 08:18:29 crc kubenswrapper[4822]: I1201 08:18:29.161652 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbrdk" event={"ID":"64a00882-0d43-4f6c-8824-5da71e0c3c5b","Type":"ContainerStarted","Data":"8b2f4412a20dc849def6fa7b0a0c37e606fed1c84313e3d95321b2f8b505f950"} Dec 01 08:18:30 crc kubenswrapper[4822]: I1201 08:18:30.169758 4822 generic.go:334] "Generic (PLEG): container finished" podID="64a00882-0d43-4f6c-8824-5da71e0c3c5b" containerID="8b2f4412a20dc849def6fa7b0a0c37e606fed1c84313e3d95321b2f8b505f950" exitCode=0 Dec 01 08:18:30 crc kubenswrapper[4822]: I1201 08:18:30.169891 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbrdk" event={"ID":"64a00882-0d43-4f6c-8824-5da71e0c3c5b","Type":"ContainerDied","Data":"8b2f4412a20dc849def6fa7b0a0c37e606fed1c84313e3d95321b2f8b505f950"} Dec 01 08:18:31 crc kubenswrapper[4822]: I1201 08:18:31.179235 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbrdk" event={"ID":"64a00882-0d43-4f6c-8824-5da71e0c3c5b","Type":"ContainerStarted","Data":"c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7"} Dec 01 08:18:31 crc kubenswrapper[4822]: I1201 08:18:31.203479 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qbrdk" podStartSLOduration=2.691081672 podStartE2EDuration="5.203455528s" podCreationTimestamp="2025-12-01 
08:18:26 +0000 UTC" firstStartedPulling="2025-12-01 08:18:28.15091192 +0000 UTC m=+5263.471719636" lastFinishedPulling="2025-12-01 08:18:30.663285766 +0000 UTC m=+5265.984093492" observedRunningTime="2025-12-01 08:18:31.196993466 +0000 UTC m=+5266.517801202" watchObservedRunningTime="2025-12-01 08:18:31.203455528 +0000 UTC m=+5266.524263224" Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.027639 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z27tr"] Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.030153 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.040472 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z27tr"] Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.132209 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/609ae30b-2863-4717-aa78-e0f26df6a292-utilities\") pod \"redhat-operators-z27tr\" (UID: \"609ae30b-2863-4717-aa78-e0f26df6a292\") " pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.132421 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpwt8\" (UniqueName: \"kubernetes.io/projected/609ae30b-2863-4717-aa78-e0f26df6a292-kube-api-access-zpwt8\") pod \"redhat-operators-z27tr\" (UID: \"609ae30b-2863-4717-aa78-e0f26df6a292\") " pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.132830 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/609ae30b-2863-4717-aa78-e0f26df6a292-catalog-content\") pod \"redhat-operators-z27tr\" (UID: \"609ae30b-2863-4717-aa78-e0f26df6a292\") " pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.234343 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpwt8\" (UniqueName: \"kubernetes.io/projected/609ae30b-2863-4717-aa78-e0f26df6a292-kube-api-access-zpwt8\") pod \"redhat-operators-z27tr\" (UID: \"609ae30b-2863-4717-aa78-e0f26df6a292\") " pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.234468 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/609ae30b-2863-4717-aa78-e0f26df6a292-catalog-content\") pod \"redhat-operators-z27tr\" (UID: \"609ae30b-2863-4717-aa78-e0f26df6a292\") " pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.234520 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/609ae30b-2863-4717-aa78-e0f26df6a292-utilities\") pod \"redhat-operators-z27tr\" (UID: \"609ae30b-2863-4717-aa78-e0f26df6a292\") " pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.235201 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/609ae30b-2863-4717-aa78-e0f26df6a292-utilities\") pod \"redhat-operators-z27tr\" (UID: 
\"609ae30b-2863-4717-aa78-e0f26df6a292\") " pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.235360 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/609ae30b-2863-4717-aa78-e0f26df6a292-catalog-content\") pod \"redhat-operators-z27tr\" (UID: \"609ae30b-2863-4717-aa78-e0f26df6a292\") " pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.261751 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpwt8\" (UniqueName: \"kubernetes.io/projected/609ae30b-2863-4717-aa78-e0f26df6a292-kube-api-access-zpwt8\") pod \"redhat-operators-z27tr\" (UID: \"609ae30b-2863-4717-aa78-e0f26df6a292\") " pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.374379 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.793458 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z27tr"] Dec 01 08:18:34 crc kubenswrapper[4822]: I1201 08:18:34.956389 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:18:34 crc kubenswrapper[4822]: E1201 08:18:34.956744 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:18:35 crc kubenswrapper[4822]: I1201 08:18:35.216296 4822 generic.go:334] "Generic (PLEG): container finished" podID="609ae30b-2863-4717-aa78-e0f26df6a292" containerID="53b911ccff76e9f1b90b140152098dff1ad10a71a2cf547035e29c14d78f1847" exitCode=0 Dec 01 08:18:35 crc kubenswrapper[4822]: I1201 08:18:35.216333 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27tr" event={"ID":"609ae30b-2863-4717-aa78-e0f26df6a292","Type":"ContainerDied","Data":"53b911ccff76e9f1b90b140152098dff1ad10a71a2cf547035e29c14d78f1847"} Dec 01 08:18:35 crc kubenswrapper[4822]: I1201 08:18:35.216358 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27tr" event={"ID":"609ae30b-2863-4717-aa78-e0f26df6a292","Type":"ContainerStarted","Data":"f657d367a51d4a6a3027598747209911c92d10da592b5c8b26defd2beedf091d"} Dec 01 08:18:36 crc kubenswrapper[4822]: I1201 08:18:36.978122 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:36 crc kubenswrapper[4822]: I1201 08:18:36.979263 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:37 crc kubenswrapper[4822]: I1201 08:18:37.040773 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:37 crc kubenswrapper[4822]: I1201 08:18:37.236249 4822 generic.go:334] "Generic (PLEG): container finished" podID="609ae30b-2863-4717-aa78-e0f26df6a292" 
containerID="148118c0fa87f1ae88e22bb0ec10f4dcb40fd45d8262c1f0e4a5553de8049dac" exitCode=0 Dec 01 08:18:37 crc kubenswrapper[4822]: I1201 08:18:37.236347 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27tr" event={"ID":"609ae30b-2863-4717-aa78-e0f26df6a292","Type":"ContainerDied","Data":"148118c0fa87f1ae88e22bb0ec10f4dcb40fd45d8262c1f0e4a5553de8049dac"} Dec 01 08:18:37 crc kubenswrapper[4822]: I1201 08:18:37.303204 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:38 crc kubenswrapper[4822]: I1201 08:18:38.255729 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27tr" event={"ID":"609ae30b-2863-4717-aa78-e0f26df6a292","Type":"ContainerStarted","Data":"ce0742f0432b72ad57fc8bf84cd93b8bddd5771c891d297a913d33f0eaa46a5d"} Dec 01 08:18:38 crc kubenswrapper[4822]: I1201 08:18:38.288140 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z27tr" podStartSLOduration=2.665673847 podStartE2EDuration="5.28802704s" podCreationTimestamp="2025-12-01 08:18:33 +0000 UTC" firstStartedPulling="2025-12-01 08:18:35.217988027 +0000 UTC m=+5270.538795713" lastFinishedPulling="2025-12-01 08:18:37.84034118 +0000 UTC m=+5273.161148906" observedRunningTime="2025-12-01 08:18:38.274070325 +0000 UTC m=+5273.594878061" watchObservedRunningTime="2025-12-01 08:18:38.28802704 +0000 UTC m=+5273.608834746" Dec 01 08:18:39 crc kubenswrapper[4822]: I1201 08:18:39.008177 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qbrdk"] Dec 01 08:18:39 crc kubenswrapper[4822]: I1201 08:18:39.280196 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qbrdk" podUID="64a00882-0d43-4f6c-8824-5da71e0c3c5b" containerName="registry-server" containerID="cri-o://c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7" gracePeriod=2 Dec 01 08:18:39 crc kubenswrapper[4822]: I1201 08:18:39.720412 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:39 crc kubenswrapper[4822]: I1201 08:18:39.827109 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64a00882-0d43-4f6c-8824-5da71e0c3c5b-utilities\") pod \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\" (UID: \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\") " Dec 01 08:18:39 crc kubenswrapper[4822]: I1201 08:18:39.827263 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h47v5\" (UniqueName: \"kubernetes.io/projected/64a00882-0d43-4f6c-8824-5da71e0c3c5b-kube-api-access-h47v5\") pod \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\" (UID: \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\") " Dec 01 08:18:39 crc kubenswrapper[4822]: I1201 08:18:39.827393 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64a00882-0d43-4f6c-8824-5da71e0c3c5b-catalog-content\") pod \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\" (UID: \"64a00882-0d43-4f6c-8824-5da71e0c3c5b\") " Dec 01 08:18:39 crc kubenswrapper[4822]: I1201 08:18:39.828644 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64a00882-0d43-4f6c-8824-5da71e0c3c5b-utilities" (OuterVolumeSpecName: "utilities") pod "64a00882-0d43-4f6c-8824-5da71e0c3c5b" (UID: "64a00882-0d43-4f6c-8824-5da71e0c3c5b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:18:39 crc kubenswrapper[4822]: I1201 08:18:39.832714 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64a00882-0d43-4f6c-8824-5da71e0c3c5b-kube-api-access-h47v5" (OuterVolumeSpecName: "kube-api-access-h47v5") pod "64a00882-0d43-4f6c-8824-5da71e0c3c5b" (UID: "64a00882-0d43-4f6c-8824-5da71e0c3c5b"). InnerVolumeSpecName "kube-api-access-h47v5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:18:39 crc kubenswrapper[4822]: I1201 08:18:39.862935 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64a00882-0d43-4f6c-8824-5da71e0c3c5b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "64a00882-0d43-4f6c-8824-5da71e0c3c5b" (UID: "64a00882-0d43-4f6c-8824-5da71e0c3c5b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:18:39 crc kubenswrapper[4822]: I1201 08:18:39.928570 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64a00882-0d43-4f6c-8824-5da71e0c3c5b-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 08:18:39 crc kubenswrapper[4822]: I1201 08:18:39.928608 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h47v5\" (UniqueName: \"kubernetes.io/projected/64a00882-0d43-4f6c-8824-5da71e0c3c5b-kube-api-access-h47v5\") on node \"crc\" DevicePath \"\"" Dec 01 08:18:39 crc kubenswrapper[4822]: I1201 08:18:39.928621 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64a00882-0d43-4f6c-8824-5da71e0c3c5b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.293097 4822 generic.go:334] "Generic (PLEG): container finished" podID="64a00882-0d43-4f6c-8824-5da71e0c3c5b" containerID="c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7" exitCode=0 Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.293161 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbrdk" event={"ID":"64a00882-0d43-4f6c-8824-5da71e0c3c5b","Type":"ContainerDied","Data":"c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7"} Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.293210 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qbrdk" event={"ID":"64a00882-0d43-4f6c-8824-5da71e0c3c5b","Type":"ContainerDied","Data":"057f4d9e1f225d09d247053a6f196622a1ee18ccc9874931afbe6aa7913dbef5"} Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.293211 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qbrdk" Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.293287 4822 scope.go:117] "RemoveContainer" containerID="c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7" Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.317076 4822 scope.go:117] "RemoveContainer" containerID="8b2f4412a20dc849def6fa7b0a0c37e606fed1c84313e3d95321b2f8b505f950" Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.346209 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qbrdk"] Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.353634 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qbrdk"] Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.365600 4822 scope.go:117] "RemoveContainer" containerID="646ed85a792750ae768f85a6a441b0fd701c307df28d1ea7536a605a2bb0e220" Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.388069 4822 scope.go:117] "RemoveContainer" containerID="c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7" Dec 01 08:18:40 crc kubenswrapper[4822]: E1201 08:18:40.394274 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7\": container with ID starting with c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7 not found: ID does not exist" containerID="c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7" Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.394415 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7"} err="failed to get container status \"c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7\": rpc error: code = NotFound desc = could not find container \"c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7\": container with ID starting with c1083750517aeac0d7aa0b73f894d5e43665751c20eb30f6a321ce49cb1fe5d7 not found: ID does not exist" Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.394496 4822 scope.go:117] "RemoveContainer" containerID="8b2f4412a20dc849def6fa7b0a0c37e606fed1c84313e3d95321b2f8b505f950" Dec 01 08:18:40 crc kubenswrapper[4822]: E1201 08:18:40.406212 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b2f4412a20dc849def6fa7b0a0c37e606fed1c84313e3d95321b2f8b505f950\": container with ID starting with 8b2f4412a20dc849def6fa7b0a0c37e606fed1c84313e3d95321b2f8b505f950 not found: ID does not exist" containerID="8b2f4412a20dc849def6fa7b0a0c37e606fed1c84313e3d95321b2f8b505f950" Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.406268 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b2f4412a20dc849def6fa7b0a0c37e606fed1c84313e3d95321b2f8b505f950"} err="failed to get container status \"8b2f4412a20dc849def6fa7b0a0c37e606fed1c84313e3d95321b2f8b505f950\": rpc error: code = NotFound desc = could not find container \"8b2f4412a20dc849def6fa7b0a0c37e606fed1c84313e3d95321b2f8b505f950\": container with ID starting with 8b2f4412a20dc849def6fa7b0a0c37e606fed1c84313e3d95321b2f8b505f950 not found: ID does not exist" Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.406309 4822 scope.go:117] "RemoveContainer" 
containerID="646ed85a792750ae768f85a6a441b0fd701c307df28d1ea7536a605a2bb0e220" Dec 01 08:18:40 crc kubenswrapper[4822]: E1201 08:18:40.406853 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"646ed85a792750ae768f85a6a441b0fd701c307df28d1ea7536a605a2bb0e220\": container with ID starting with 646ed85a792750ae768f85a6a441b0fd701c307df28d1ea7536a605a2bb0e220 not found: ID does not exist" containerID="646ed85a792750ae768f85a6a441b0fd701c307df28d1ea7536a605a2bb0e220" Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.406950 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"646ed85a792750ae768f85a6a441b0fd701c307df28d1ea7536a605a2bb0e220"} err="failed to get container status \"646ed85a792750ae768f85a6a441b0fd701c307df28d1ea7536a605a2bb0e220\": rpc error: code = NotFound desc = could not find container \"646ed85a792750ae768f85a6a441b0fd701c307df28d1ea7536a605a2bb0e220\": container with ID starting with 646ed85a792750ae768f85a6a441b0fd701c307df28d1ea7536a605a2bb0e220 not found: ID does not exist" Dec 01 08:18:40 crc kubenswrapper[4822]: I1201 08:18:40.962520 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64a00882-0d43-4f6c-8824-5da71e0c3c5b" path="/var/lib/kubelet/pods/64a00882-0d43-4f6c-8824-5da71e0c3c5b/volumes" Dec 01 08:18:44 crc kubenswrapper[4822]: I1201 08:18:44.374878 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:44 crc kubenswrapper[4822]: I1201 08:18:44.375218 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:45 crc kubenswrapper[4822]: I1201 08:18:45.421477 4822 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-z27tr" podUID="609ae30b-2863-4717-aa78-e0f26df6a292" containerName="registry-server" probeResult="failure" output=< Dec 01 08:18:45 crc kubenswrapper[4822]: timeout: failed to connect service ":50051" within 1s Dec 01 08:18:45 crc kubenswrapper[4822]: > Dec 01 08:18:48 crc kubenswrapper[4822]: I1201 08:18:48.951822 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:18:48 crc kubenswrapper[4822]: E1201 08:18:48.952693 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:18:54 crc kubenswrapper[4822]: I1201 08:18:54.456260 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:54 crc kubenswrapper[4822]: I1201 08:18:54.529675 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:54 crc kubenswrapper[4822]: I1201 08:18:54.720751 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z27tr"] Dec 01 08:18:56 crc kubenswrapper[4822]: I1201 08:18:56.443712 4822 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-operators-z27tr" podUID="609ae30b-2863-4717-aa78-e0f26df6a292" containerName="registry-server" containerID="cri-o://ce0742f0432b72ad57fc8bf84cd93b8bddd5771c891d297a913d33f0eaa46a5d" gracePeriod=2 Dec 01 08:18:57 crc kubenswrapper[4822]: I1201 08:18:57.458671 4822 generic.go:334] "Generic (PLEG): container finished" podID="609ae30b-2863-4717-aa78-e0f26df6a292" containerID="ce0742f0432b72ad57fc8bf84cd93b8bddd5771c891d297a913d33f0eaa46a5d" exitCode=0 Dec 01 08:18:57 crc kubenswrapper[4822]: I1201 08:18:57.458766 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27tr" event={"ID":"609ae30b-2863-4717-aa78-e0f26df6a292","Type":"ContainerDied","Data":"ce0742f0432b72ad57fc8bf84cd93b8bddd5771c891d297a913d33f0eaa46a5d"} Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.031142 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.076622 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpwt8\" (UniqueName: \"kubernetes.io/projected/609ae30b-2863-4717-aa78-e0f26df6a292-kube-api-access-zpwt8\") pod \"609ae30b-2863-4717-aa78-e0f26df6a292\" (UID: \"609ae30b-2863-4717-aa78-e0f26df6a292\") " Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.077215 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/609ae30b-2863-4717-aa78-e0f26df6a292-utilities\") pod \"609ae30b-2863-4717-aa78-e0f26df6a292\" (UID: \"609ae30b-2863-4717-aa78-e0f26df6a292\") " Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.077487 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/609ae30b-2863-4717-aa78-e0f26df6a292-catalog-content\") pod \"609ae30b-2863-4717-aa78-e0f26df6a292\" (UID: \"609ae30b-2863-4717-aa78-e0f26df6a292\") " Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.078580 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/609ae30b-2863-4717-aa78-e0f26df6a292-utilities" (OuterVolumeSpecName: "utilities") pod "609ae30b-2863-4717-aa78-e0f26df6a292" (UID: "609ae30b-2863-4717-aa78-e0f26df6a292"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.085154 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/609ae30b-2863-4717-aa78-e0f26df6a292-kube-api-access-zpwt8" (OuterVolumeSpecName: "kube-api-access-zpwt8") pod "609ae30b-2863-4717-aa78-e0f26df6a292" (UID: "609ae30b-2863-4717-aa78-e0f26df6a292"). InnerVolumeSpecName "kube-api-access-zpwt8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.180214 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpwt8\" (UniqueName: \"kubernetes.io/projected/609ae30b-2863-4717-aa78-e0f26df6a292-kube-api-access-zpwt8\") on node \"crc\" DevicePath \"\"" Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.180266 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/609ae30b-2863-4717-aa78-e0f26df6a292-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.219001 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/609ae30b-2863-4717-aa78-e0f26df6a292-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "609ae30b-2863-4717-aa78-e0f26df6a292" (UID: "609ae30b-2863-4717-aa78-e0f26df6a292"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.281908 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/609ae30b-2863-4717-aa78-e0f26df6a292-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.469309 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z27tr" event={"ID":"609ae30b-2863-4717-aa78-e0f26df6a292","Type":"ContainerDied","Data":"f657d367a51d4a6a3027598747209911c92d10da592b5c8b26defd2beedf091d"} Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.469674 4822 scope.go:117] "RemoveContainer" containerID="ce0742f0432b72ad57fc8bf84cd93b8bddd5771c891d297a913d33f0eaa46a5d" Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.469371 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z27tr" Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.501246 4822 scope.go:117] "RemoveContainer" containerID="148118c0fa87f1ae88e22bb0ec10f4dcb40fd45d8262c1f0e4a5553de8049dac" Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.512194 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z27tr"] Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.523304 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z27tr"] Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.527980 4822 scope.go:117] "RemoveContainer" containerID="53b911ccff76e9f1b90b140152098dff1ad10a71a2cf547035e29c14d78f1847" Dec 01 08:18:58 crc kubenswrapper[4822]: I1201 08:18:58.962564 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="609ae30b-2863-4717-aa78-e0f26df6a292" path="/var/lib/kubelet/pods/609ae30b-2863-4717-aa78-e0f26df6a292/volumes" Dec 01 08:19:00 crc kubenswrapper[4822]: I1201 08:19:00.950214 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:19:00 crc kubenswrapper[4822]: E1201 08:19:00.950737 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:19:13 crc kubenswrapper[4822]: I1201 08:19:13.951669 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856" Dec 01 08:19:14 crc kubenswrapper[4822]: I1201 08:19:14.658189 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"8c3304b89b43cb000a7e238f03a5a29d898f7d9ce4e8b306ae16df08e8c3d064"} Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.598479 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-85clr"] Dec 01 08:19:30 crc kubenswrapper[4822]: E1201 08:19:30.599498 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64a00882-0d43-4f6c-8824-5da71e0c3c5b" containerName="extract-content" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.599522 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="64a00882-0d43-4f6c-8824-5da71e0c3c5b" containerName="extract-content" Dec 01 08:19:30 crc kubenswrapper[4822]: E1201 08:19:30.599572 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64a00882-0d43-4f6c-8824-5da71e0c3c5b" containerName="registry-server" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.599582 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="64a00882-0d43-4f6c-8824-5da71e0c3c5b" containerName="registry-server" Dec 01 08:19:30 crc kubenswrapper[4822]: E1201 08:19:30.599598 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="609ae30b-2863-4717-aa78-e0f26df6a292" containerName="registry-server" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.599607 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="609ae30b-2863-4717-aa78-e0f26df6a292" 
containerName="registry-server" Dec 01 08:19:30 crc kubenswrapper[4822]: E1201 08:19:30.599630 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="609ae30b-2863-4717-aa78-e0f26df6a292" containerName="extract-utilities" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.599639 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="609ae30b-2863-4717-aa78-e0f26df6a292" containerName="extract-utilities" Dec 01 08:19:30 crc kubenswrapper[4822]: E1201 08:19:30.599655 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64a00882-0d43-4f6c-8824-5da71e0c3c5b" containerName="extract-utilities" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.599663 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="64a00882-0d43-4f6c-8824-5da71e0c3c5b" containerName="extract-utilities" Dec 01 08:19:30 crc kubenswrapper[4822]: E1201 08:19:30.599676 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="609ae30b-2863-4717-aa78-e0f26df6a292" containerName="extract-content" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.599682 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="609ae30b-2863-4717-aa78-e0f26df6a292" containerName="extract-content" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.599884 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="64a00882-0d43-4f6c-8824-5da71e0c3c5b" containerName="registry-server" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.599905 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="609ae30b-2863-4717-aa78-e0f26df6a292" containerName="registry-server" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.601195 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.607381 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-85clr"] Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.652871 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ng6lt\" (UniqueName: \"kubernetes.io/projected/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-kube-api-access-ng6lt\") pod \"certified-operators-85clr\" (UID: \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\") " pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.652948 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-catalog-content\") pod \"certified-operators-85clr\" (UID: \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\") " pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.653013 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-utilities\") pod \"certified-operators-85clr\" (UID: \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\") " pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.754203 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ng6lt\" (UniqueName: \"kubernetes.io/projected/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-kube-api-access-ng6lt\") pod 
\"certified-operators-85clr\" (UID: \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\") " pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.754289 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-catalog-content\") pod \"certified-operators-85clr\" (UID: \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\") " pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.754353 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-utilities\") pod \"certified-operators-85clr\" (UID: \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\") " pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.754980 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-utilities\") pod \"certified-operators-85clr\" (UID: \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\") " pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.755077 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-catalog-content\") pod \"certified-operators-85clr\" (UID: \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\") " pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.776091 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ng6lt\" (UniqueName: \"kubernetes.io/projected/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-kube-api-access-ng6lt\") pod \"certified-operators-85clr\" (UID: \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\") " pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.923944 4822 util.go:30] "No sandbox for pod can be found. 
Dec 01 08:19:30 crc kubenswrapper[4822]: I1201 08:19:30.923944 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-85clr"
Dec 01 08:19:31 crc kubenswrapper[4822]: W1201 08:19:31.274484 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b9f6fd7_a630_413d_8e1e_615ae1e552e1.slice/crio-53d01ae6b2d2d25945fb62b3be4771f54710297769c8b36b9f19da50d46d7a86 WatchSource:0}: Error finding container 53d01ae6b2d2d25945fb62b3be4771f54710297769c8b36b9f19da50d46d7a86: Status 404 returned error can't find the container with id 53d01ae6b2d2d25945fb62b3be4771f54710297769c8b36b9f19da50d46d7a86
Dec 01 08:19:31 crc kubenswrapper[4822]: I1201 08:19:31.278443 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-85clr"]
Dec 01 08:19:31 crc kubenswrapper[4822]: I1201 08:19:31.806151 4822 generic.go:334] "Generic (PLEG): container finished" podID="2b9f6fd7-a630-413d-8e1e-615ae1e552e1" containerID="a294e3a04017fde87278bd4eafff9ac0ee9d6081466b920498544ad88fda8184" exitCode=0
Dec 01 08:19:31 crc kubenswrapper[4822]: I1201 08:19:31.806190 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85clr" event={"ID":"2b9f6fd7-a630-413d-8e1e-615ae1e552e1","Type":"ContainerDied","Data":"a294e3a04017fde87278bd4eafff9ac0ee9d6081466b920498544ad88fda8184"}
Dec 01 08:19:31 crc kubenswrapper[4822]: I1201 08:19:31.806458 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85clr" event={"ID":"2b9f6fd7-a630-413d-8e1e-615ae1e552e1","Type":"ContainerStarted","Data":"53d01ae6b2d2d25945fb62b3be4771f54710297769c8b36b9f19da50d46d7a86"}
Dec 01 08:19:33 crc kubenswrapper[4822]: I1201 08:19:33.822351 4822 generic.go:334] "Generic (PLEG): container finished" podID="2b9f6fd7-a630-413d-8e1e-615ae1e552e1" containerID="c293a39b2bd27ae9ebdf0cb467a4c6c5b02d1d157d356ddd11b3b104fdc7298c" exitCode=0
Dec 01 08:19:33 crc kubenswrapper[4822]: I1201 08:19:33.822435 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85clr" event={"ID":"2b9f6fd7-a630-413d-8e1e-615ae1e552e1","Type":"ContainerDied","Data":"c293a39b2bd27ae9ebdf0cb467a4c6c5b02d1d157d356ddd11b3b104fdc7298c"}
Dec 01 08:19:34 crc kubenswrapper[4822]: I1201 08:19:34.831646 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85clr" event={"ID":"2b9f6fd7-a630-413d-8e1e-615ae1e552e1","Type":"ContainerStarted","Data":"bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596"}
Dec 01 08:19:34 crc kubenswrapper[4822]: I1201 08:19:34.849840 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-85clr" podStartSLOduration=2.302075054 podStartE2EDuration="4.849816469s" podCreationTimestamp="2025-12-01 08:19:30 +0000 UTC" firstStartedPulling="2025-12-01 08:19:31.808437246 +0000 UTC m=+5327.129244932" lastFinishedPulling="2025-12-01 08:19:34.356178651 +0000 UTC m=+5329.676986347" observedRunningTime="2025-12-01 08:19:34.84843191 +0000 UTC m=+5330.169239596" watchObservedRunningTime="2025-12-01 08:19:34.849816469 +0000 UTC m=+5330.170624155"
Dec 01 08:19:40 crc kubenswrapper[4822]: I1201 08:19:40.924264 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-85clr"
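The pod_startup_latency_tracker entry encodes simple arithmetic: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (about 4.85s here), and podStartSLOduration is that figure minus the image-pull window, lastFinishedPulling - firstStartedPulling, taken from the monotonic (m=) readings, which gives exactly the reported 2.302075054s. A small Go program reproducing the calculation from the wall-clock timestamps above; the last digits of the SLO duration differ by a few nanoseconds because the tracker subtracts monotonic readings:

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s) // Go accepts fractional seconds even without them in the layout
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-12-01 08:19:30 +0000 UTC")
	firstPull := parse("2025-12-01 08:19:31.808437246 +0000 UTC")
	lastPull := parse("2025-12-01 08:19:34.356178651 +0000 UTC")
	running := parse("2025-12-01 08:19:34.849816469 +0000 UTC")

	e2e := running.Sub(created)          // podStartE2EDuration: ~4.849816469s
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: e2e minus pull time, ~2.30s
	fmt.Println(e2e, slo)
}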
pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:40 crc kubenswrapper[4822]: I1201 08:19:40.965371 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:41 crc kubenswrapper[4822]: I1201 08:19:41.943042 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:41 crc kubenswrapper[4822]: I1201 08:19:41.992505 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-85clr"] Dec 01 08:19:43 crc kubenswrapper[4822]: I1201 08:19:43.923096 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-85clr" podUID="2b9f6fd7-a630-413d-8e1e-615ae1e552e1" containerName="registry-server" containerID="cri-o://bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596" gracePeriod=2 Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.812995 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.875214 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-utilities\") pod \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\" (UID: \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\") " Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.875278 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ng6lt\" (UniqueName: \"kubernetes.io/projected/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-kube-api-access-ng6lt\") pod \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\" (UID: \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\") " Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.875347 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-catalog-content\") pod \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\" (UID: \"2b9f6fd7-a630-413d-8e1e-615ae1e552e1\") " Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.876219 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-utilities" (OuterVolumeSpecName: "utilities") pod "2b9f6fd7-a630-413d-8e1e-615ae1e552e1" (UID: "2b9f6fd7-a630-413d-8e1e-615ae1e552e1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.880666 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-kube-api-access-ng6lt" (OuterVolumeSpecName: "kube-api-access-ng6lt") pod "2b9f6fd7-a630-413d-8e1e-615ae1e552e1" (UID: "2b9f6fd7-a630-413d-8e1e-615ae1e552e1"). InnerVolumeSpecName "kube-api-access-ng6lt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.931063 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2b9f6fd7-a630-413d-8e1e-615ae1e552e1" (UID: "2b9f6fd7-a630-413d-8e1e-615ae1e552e1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.936914 4822 generic.go:334] "Generic (PLEG): container finished" podID="2b9f6fd7-a630-413d-8e1e-615ae1e552e1" containerID="bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596" exitCode=0 Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.936952 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85clr" event={"ID":"2b9f6fd7-a630-413d-8e1e-615ae1e552e1","Type":"ContainerDied","Data":"bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596"} Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.936977 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85clr" event={"ID":"2b9f6fd7-a630-413d-8e1e-615ae1e552e1","Type":"ContainerDied","Data":"53d01ae6b2d2d25945fb62b3be4771f54710297769c8b36b9f19da50d46d7a86"} Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.936994 4822 scope.go:117] "RemoveContainer" containerID="bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596" Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.937123 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-85clr" Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.959426 4822 scope.go:117] "RemoveContainer" containerID="c293a39b2bd27ae9ebdf0cb467a4c6c5b02d1d157d356ddd11b3b104fdc7298c" Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.984435 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.984475 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ng6lt\" (UniqueName: \"kubernetes.io/projected/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-kube-api-access-ng6lt\") on node \"crc\" DevicePath \"\"" Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.984488 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b9f6fd7-a630-413d-8e1e-615ae1e552e1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.984593 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-85clr"] Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.991108 4822 scope.go:117] "RemoveContainer" containerID="a294e3a04017fde87278bd4eafff9ac0ee9d6081466b920498544ad88fda8184" Dec 01 08:19:44 crc kubenswrapper[4822]: I1201 08:19:44.991176 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-85clr"] Dec 01 08:19:45 crc kubenswrapper[4822]: I1201 08:19:45.015837 4822 scope.go:117] "RemoveContainer" containerID="bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596" Dec 01 08:19:45 crc kubenswrapper[4822]: E1201 08:19:45.016509 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596\": container with ID starting with bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596 not found: ID does not exist" containerID="bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596" Dec 01 08:19:45 crc kubenswrapper[4822]: I1201 08:19:45.016567 
4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596"} err="failed to get container status \"bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596\": rpc error: code = NotFound desc = could not find container \"bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596\": container with ID starting with bb2f0b8808ad3635f09e130852cc3d367bb183bf877668dd6bc0965d4b0cd596 not found: ID does not exist" Dec 01 08:19:45 crc kubenswrapper[4822]: I1201 08:19:45.016596 4822 scope.go:117] "RemoveContainer" containerID="c293a39b2bd27ae9ebdf0cb467a4c6c5b02d1d157d356ddd11b3b104fdc7298c" Dec 01 08:19:45 crc kubenswrapper[4822]: E1201 08:19:45.016999 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c293a39b2bd27ae9ebdf0cb467a4c6c5b02d1d157d356ddd11b3b104fdc7298c\": container with ID starting with c293a39b2bd27ae9ebdf0cb467a4c6c5b02d1d157d356ddd11b3b104fdc7298c not found: ID does not exist" containerID="c293a39b2bd27ae9ebdf0cb467a4c6c5b02d1d157d356ddd11b3b104fdc7298c" Dec 01 08:19:45 crc kubenswrapper[4822]: I1201 08:19:45.017030 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c293a39b2bd27ae9ebdf0cb467a4c6c5b02d1d157d356ddd11b3b104fdc7298c"} err="failed to get container status \"c293a39b2bd27ae9ebdf0cb467a4c6c5b02d1d157d356ddd11b3b104fdc7298c\": rpc error: code = NotFound desc = could not find container \"c293a39b2bd27ae9ebdf0cb467a4c6c5b02d1d157d356ddd11b3b104fdc7298c\": container with ID starting with c293a39b2bd27ae9ebdf0cb467a4c6c5b02d1d157d356ddd11b3b104fdc7298c not found: ID does not exist" Dec 01 08:19:45 crc kubenswrapper[4822]: I1201 08:19:45.017047 4822 scope.go:117] "RemoveContainer" containerID="a294e3a04017fde87278bd4eafff9ac0ee9d6081466b920498544ad88fda8184" Dec 01 08:19:45 crc kubenswrapper[4822]: E1201 08:19:45.017319 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a294e3a04017fde87278bd4eafff9ac0ee9d6081466b920498544ad88fda8184\": container with ID starting with a294e3a04017fde87278bd4eafff9ac0ee9d6081466b920498544ad88fda8184 not found: ID does not exist" containerID="a294e3a04017fde87278bd4eafff9ac0ee9d6081466b920498544ad88fda8184" Dec 01 08:19:45 crc kubenswrapper[4822]: I1201 08:19:45.017394 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a294e3a04017fde87278bd4eafff9ac0ee9d6081466b920498544ad88fda8184"} err="failed to get container status \"a294e3a04017fde87278bd4eafff9ac0ee9d6081466b920498544ad88fda8184\": rpc error: code = NotFound desc = could not find container \"a294e3a04017fde87278bd4eafff9ac0ee9d6081466b920498544ad88fda8184\": container with ID starting with a294e3a04017fde87278bd4eafff9ac0ee9d6081466b920498544ad88fda8184 not found: ID does not exist" Dec 01 08:19:46 crc kubenswrapper[4822]: I1201 08:19:46.959659 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b9f6fd7-a630-413d-8e1e-615ae1e552e1" path="/var/lib/kubelet/pods/2b9f6fd7-a630-413d-8e1e-615ae1e552e1/volumes" Dec 01 08:19:47 crc kubenswrapper[4822]: E1201 08:19:47.836408 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b9f6fd7_a630_413d_8e1e_615ae1e552e1.slice\": RecentStats: 
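The NotFound errors above are benign: the containers were already removed, so when the kubelet retries RemoveContainer the CRI status lookup fails and the result is merely logged. A sketch of that tolerate-NotFound idiom using gRPC status codes; the lookup function is a stub standing in for the real CRI ContainerStatus RPC:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer tolerates NotFound: if the container is already gone,
// removal is treated as having succeeded, since deletion is idempotent.
func removeContainer(id string, lookup func(id string) error) error {
	if err := lookup(id); err != nil {
		if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
			fmt.Printf("container %s already removed, nothing to do\n", id)
			return nil
		}
		return err // a real failure, surface it
	}
	// ... container still exists: issue the actual removal here ...
	return nil
}

func main() {
	gone := func(id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	if err := removeContainer("bb2f0b88", gone); err != nil {
		panic(err)
	}
}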
Dec 01 08:19:47 crc kubenswrapper[4822]: E1201 08:19:47.836408 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b9f6fd7_a630_413d_8e1e_615ae1e552e1.slice\": RecentStats: unable to find data in memory cache]"
Dec 01 08:19:58 crc kubenswrapper[4822]: E1201 08:19:58.092597 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b9f6fd7_a630_413d_8e1e_615ae1e552e1.slice\": RecentStats: unable to find data in memory cache]"
Dec 01 08:20:08 crc kubenswrapper[4822]: E1201 08:20:08.263878 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b9f6fd7_a630_413d_8e1e_615ae1e552e1.slice\": RecentStats: unable to find data in memory cache]"
Dec 01 08:20:18 crc kubenswrapper[4822]: E1201 08:20:18.501944 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b9f6fd7_a630_413d_8e1e_615ae1e552e1.slice\": RecentStats: unable to find data in memory cache]"
Dec 01 08:20:28 crc kubenswrapper[4822]: E1201 08:20:28.693891 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b9f6fd7_a630_413d_8e1e_615ae1e552e1.slice\": RecentStats: unable to find data in memory cache]"
Dec 01 08:20:38 crc kubenswrapper[4822]: E1201 08:20:38.899245 4822 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b9f6fd7_a630_413d_8e1e_615ae1e552e1.slice\": RecentStats: unable to find data in memory cache]"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.235294 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-k8gh5"]
Dec 01 08:21:38 crc kubenswrapper[4822]: E1201 08:21:38.236156 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b9f6fd7-a630-413d-8e1e-615ae1e552e1" containerName="extract-utilities"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.236182 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b9f6fd7-a630-413d-8e1e-615ae1e552e1" containerName="extract-utilities"
Dec 01 08:21:38 crc kubenswrapper[4822]: E1201 08:21:38.236211 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b9f6fd7-a630-413d-8e1e-615ae1e552e1" containerName="extract-content"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.236219 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b9f6fd7-a630-413d-8e1e-615ae1e552e1" containerName="extract-content"
Dec 01 08:21:38 crc kubenswrapper[4822]: E1201 08:21:38.236232 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b9f6fd7-a630-413d-8e1e-615ae1e552e1" containerName="registry-server"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.236242 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b9f6fd7-a630-413d-8e1e-615ae1e552e1" containerName="registry-server"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.236504 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b9f6fd7-a630-413d-8e1e-615ae1e552e1" containerName="registry-server"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.237866 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.252430 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k8gh5"]
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.367598 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md9ht\" (UniqueName: \"kubernetes.io/projected/e49c3bce-716a-45f6-abb4-d0426bba56c2-kube-api-access-md9ht\") pod \"community-operators-k8gh5\" (UID: \"e49c3bce-716a-45f6-abb4-d0426bba56c2\") " pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.367714 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e49c3bce-716a-45f6-abb4-d0426bba56c2-utilities\") pod \"community-operators-k8gh5\" (UID: \"e49c3bce-716a-45f6-abb4-d0426bba56c2\") " pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.367782 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e49c3bce-716a-45f6-abb4-d0426bba56c2-catalog-content\") pod \"community-operators-k8gh5\" (UID: \"e49c3bce-716a-45f6-abb4-d0426bba56c2\") " pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.469364 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e49c3bce-716a-45f6-abb4-d0426bba56c2-catalog-content\") pod \"community-operators-k8gh5\" (UID: \"e49c3bce-716a-45f6-abb4-d0426bba56c2\") " pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.469457 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md9ht\" (UniqueName: \"kubernetes.io/projected/e49c3bce-716a-45f6-abb4-d0426bba56c2-kube-api-access-md9ht\") pod \"community-operators-k8gh5\" (UID: \"e49c3bce-716a-45f6-abb4-d0426bba56c2\") " pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.469519 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e49c3bce-716a-45f6-abb4-d0426bba56c2-utilities\") pod \"community-operators-k8gh5\" (UID: \"e49c3bce-716a-45f6-abb4-d0426bba56c2\") " pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.470245 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e49c3bce-716a-45f6-abb4-d0426bba56c2-utilities\") pod \"community-operators-k8gh5\" (UID: \"e49c3bce-716a-45f6-abb4-d0426bba56c2\") " pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.470594 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e49c3bce-716a-45f6-abb4-d0426bba56c2-catalog-content\") pod \"community-operators-k8gh5\" (UID: \"e49c3bce-716a-45f6-abb4-d0426bba56c2\") " pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.500809 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md9ht\" (UniqueName: \"kubernetes.io/projected/e49c3bce-716a-45f6-abb4-d0426bba56c2-kube-api-access-md9ht\") pod \"community-operators-k8gh5\" (UID: \"e49c3bce-716a-45f6-abb4-d0426bba56c2\") " pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:38 crc kubenswrapper[4822]: I1201 08:21:38.581167 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:39 crc kubenswrapper[4822]: I1201 08:21:39.201085 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k8gh5"]
Dec 01 08:21:40 crc kubenswrapper[4822]: I1201 08:21:40.048777 4822 generic.go:334] "Generic (PLEG): container finished" podID="e49c3bce-716a-45f6-abb4-d0426bba56c2" containerID="9f9ec35ccd7bf2c9733313cf32df18e55652b7ae887169740751eabeee83aaeb" exitCode=0
Dec 01 08:21:40 crc kubenswrapper[4822]: I1201 08:21:40.050749 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8gh5" event={"ID":"e49c3bce-716a-45f6-abb4-d0426bba56c2","Type":"ContainerDied","Data":"9f9ec35ccd7bf2c9733313cf32df18e55652b7ae887169740751eabeee83aaeb"}
Dec 01 08:21:40 crc kubenswrapper[4822]: I1201 08:21:40.052080 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8gh5" event={"ID":"e49c3bce-716a-45f6-abb4-d0426bba56c2","Type":"ContainerStarted","Data":"94ee159cc8d1db138f38b835579607a071b4aa4e31f4a8d6166a87fc859f3120"}
Dec 01 08:21:41 crc kubenswrapper[4822]: I1201 08:21:41.064954 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8gh5" event={"ID":"e49c3bce-716a-45f6-abb4-d0426bba56c2","Type":"ContainerStarted","Data":"3224d8e95edcbb611765eddff7a6b217048ff5e010f2c651e84ce83a152ed053"}
Dec 01 08:21:42 crc kubenswrapper[4822]: I1201 08:21:42.076948 4822 generic.go:334] "Generic (PLEG): container finished" podID="e49c3bce-716a-45f6-abb4-d0426bba56c2" containerID="3224d8e95edcbb611765eddff7a6b217048ff5e010f2c651e84ce83a152ed053" exitCode=0
Dec 01 08:21:42 crc kubenswrapper[4822]: I1201 08:21:42.076975 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8gh5" event={"ID":"e49c3bce-716a-45f6-abb4-d0426bba56c2","Type":"ContainerDied","Data":"3224d8e95edcbb611765eddff7a6b217048ff5e010f2c651e84ce83a152ed053"}
Dec 01 08:21:42 crc kubenswrapper[4822]: I1201 08:21:42.542528 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 08:21:42 crc kubenswrapper[4822]: I1201 08:21:42.542654 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 08:21:43 crc kubenswrapper[4822]: I1201 08:21:43.093317 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8gh5" event={"ID":"e49c3bce-716a-45f6-abb4-d0426bba56c2","Type":"ContainerStarted","Data":"eaa2df1c181ee42385148864238be0664f941b9146fbfdf0428bd149ada4f212"}
Dec 01 08:21:43 crc kubenswrapper[4822]: I1201 08:21:43.128993 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-k8gh5" podStartSLOduration=2.387314568 podStartE2EDuration="5.128963302s" podCreationTimestamp="2025-12-01 08:21:38 +0000 UTC" firstStartedPulling="2025-12-01 08:21:40.051617483 +0000 UTC m=+5455.372425209" lastFinishedPulling="2025-12-01 08:21:42.793266237 +0000 UTC m=+5458.114073943" observedRunningTime="2025-12-01 08:21:43.120876483 +0000 UTC m=+5458.441684179" watchObservedRunningTime="2025-12-01 08:21:43.128963302 +0000 UTC m=+5458.449771028"
Dec 01 08:21:48 crc kubenswrapper[4822]: I1201 08:21:48.581691 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:48 crc kubenswrapper[4822]: I1201 08:21:48.582134 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:48 crc kubenswrapper[4822]: I1201 08:21:48.646738 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:49 crc kubenswrapper[4822]: I1201 08:21:49.183260 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:49 crc kubenswrapper[4822]: I1201 08:21:49.223399 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k8gh5"]
Dec 01 08:21:51 crc kubenswrapper[4822]: I1201 08:21:51.164346 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-k8gh5" podUID="e49c3bce-716a-45f6-abb4-d0426bba56c2" containerName="registry-server" containerID="cri-o://eaa2df1c181ee42385148864238be0664f941b9146fbfdf0428bd149ada4f212" gracePeriod=2
Dec 01 08:21:52 crc kubenswrapper[4822]: I1201 08:21:52.178350 4822 generic.go:334] "Generic (PLEG): container finished" podID="e49c3bce-716a-45f6-abb4-d0426bba56c2" containerID="eaa2df1c181ee42385148864238be0664f941b9146fbfdf0428bd149ada4f212" exitCode=0
Dec 01 08:21:52 crc kubenswrapper[4822]: I1201 08:21:52.178685 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8gh5" event={"ID":"e49c3bce-716a-45f6-abb4-d0426bba56c2","Type":"ContainerDied","Data":"eaa2df1c181ee42385148864238be0664f941b9146fbfdf0428bd149ada4f212"}
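gracePeriod=2 above means the runtime gives registry-server two seconds to exit after the stop signal before it is killed outright. A rough Go equivalent of kill-with-grace-period against a local process, using plain Unix signals rather than the CRI StopContainer call the kubelet actually makes:

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace asks the process to stop, then force-kills it if it is
// still running once the grace period has elapsed.
func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	_ = cmd.Process.Signal(syscall.SIGTERM) // polite stop request
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		_ = cmd.Process.Kill() // SIGKILL once the grace period runs out
		<-done
		fmt.Println("force-killed after grace period")
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 2*time.Second) // gracePeriod=2, as in the log above
}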
Dec 01 08:21:52 crc kubenswrapper[4822]: I1201 08:21:52.345701 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:52 crc kubenswrapper[4822]: I1201 08:21:52.358600 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e49c3bce-716a-45f6-abb4-d0426bba56c2-utilities\") pod \"e49c3bce-716a-45f6-abb4-d0426bba56c2\" (UID: \"e49c3bce-716a-45f6-abb4-d0426bba56c2\") "
Dec 01 08:21:52 crc kubenswrapper[4822]: I1201 08:21:52.358717 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md9ht\" (UniqueName: \"kubernetes.io/projected/e49c3bce-716a-45f6-abb4-d0426bba56c2-kube-api-access-md9ht\") pod \"e49c3bce-716a-45f6-abb4-d0426bba56c2\" (UID: \"e49c3bce-716a-45f6-abb4-d0426bba56c2\") "
Dec 01 08:21:52 crc kubenswrapper[4822]: I1201 08:21:52.358739 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e49c3bce-716a-45f6-abb4-d0426bba56c2-catalog-content\") pod \"e49c3bce-716a-45f6-abb4-d0426bba56c2\" (UID: \"e49c3bce-716a-45f6-abb4-d0426bba56c2\") "
Dec 01 08:21:52 crc kubenswrapper[4822]: I1201 08:21:52.359610 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e49c3bce-716a-45f6-abb4-d0426bba56c2-utilities" (OuterVolumeSpecName: "utilities") pod "e49c3bce-716a-45f6-abb4-d0426bba56c2" (UID: "e49c3bce-716a-45f6-abb4-d0426bba56c2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 08:21:52 crc kubenswrapper[4822]: I1201 08:21:52.379004 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e49c3bce-716a-45f6-abb4-d0426bba56c2-kube-api-access-md9ht" (OuterVolumeSpecName: "kube-api-access-md9ht") pod "e49c3bce-716a-45f6-abb4-d0426bba56c2" (UID: "e49c3bce-716a-45f6-abb4-d0426bba56c2"). InnerVolumeSpecName "kube-api-access-md9ht". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 08:21:52 crc kubenswrapper[4822]: I1201 08:21:52.430456 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e49c3bce-716a-45f6-abb4-d0426bba56c2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e49c3bce-716a-45f6-abb4-d0426bba56c2" (UID: "e49c3bce-716a-45f6-abb4-d0426bba56c2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 08:21:52 crc kubenswrapper[4822]: I1201 08:21:52.459780 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md9ht\" (UniqueName: \"kubernetes.io/projected/e49c3bce-716a-45f6-abb4-d0426bba56c2-kube-api-access-md9ht\") on node \"crc\" DevicePath \"\""
Dec 01 08:21:52 crc kubenswrapper[4822]: I1201 08:21:52.459814 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e49c3bce-716a-45f6-abb4-d0426bba56c2-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 08:21:52 crc kubenswrapper[4822]: I1201 08:21:52.459828 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e49c3bce-716a-45f6-abb4-d0426bba56c2-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 08:21:53 crc kubenswrapper[4822]: I1201 08:21:53.191337 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8gh5" event={"ID":"e49c3bce-716a-45f6-abb4-d0426bba56c2","Type":"ContainerDied","Data":"94ee159cc8d1db138f38b835579607a071b4aa4e31f4a8d6166a87fc859f3120"}
Dec 01 08:21:53 crc kubenswrapper[4822]: I1201 08:21:53.192245 4822 scope.go:117] "RemoveContainer" containerID="eaa2df1c181ee42385148864238be0664f941b9146fbfdf0428bd149ada4f212"
Dec 01 08:21:53 crc kubenswrapper[4822]: I1201 08:21:53.191654 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k8gh5"
Dec 01 08:21:53 crc kubenswrapper[4822]: I1201 08:21:53.227821 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k8gh5"]
Dec 01 08:21:53 crc kubenswrapper[4822]: I1201 08:21:53.233536 4822 scope.go:117] "RemoveContainer" containerID="3224d8e95edcbb611765eddff7a6b217048ff5e010f2c651e84ce83a152ed053"
Dec 01 08:21:53 crc kubenswrapper[4822]: I1201 08:21:53.235379 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-k8gh5"]
Dec 01 08:21:53 crc kubenswrapper[4822]: I1201 08:21:53.255723 4822 scope.go:117] "RemoveContainer" containerID="9f9ec35ccd7bf2c9733313cf32df18e55652b7ae887169740751eabeee83aaeb"
Dec 01 08:21:54 crc kubenswrapper[4822]: I1201 08:21:54.967054 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e49c3bce-716a-45f6-abb4-d0426bba56c2" path="/var/lib/kubelet/pods/e49c3bce-716a-45f6-abb4-d0426bba56c2/volumes"
Dec 01 08:22:12 crc kubenswrapper[4822]: I1201 08:22:12.542777 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 08:22:12 crc kubenswrapper[4822]: I1201 08:22:12.543636 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 08:22:42 crc kubenswrapper[4822]: I1201 08:22:42.542301 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 08:22:42 crc kubenswrapper[4822]: I1201 08:22:42.542870 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 08:22:42 crc kubenswrapper[4822]: I1201 08:22:42.542918 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64"
Dec 01 08:22:42 crc kubenswrapper[4822]: I1201 08:22:42.543681 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8c3304b89b43cb000a7e238f03a5a29d898f7d9ce4e8b306ae16df08e8c3d064"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 08:22:42 crc kubenswrapper[4822]: I1201 08:22:42.543746 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://8c3304b89b43cb000a7e238f03a5a29d898f7d9ce4e8b306ae16df08e8c3d064" gracePeriod=600
Dec 01 08:22:43 crc kubenswrapper[4822]: I1201 08:22:43.823746 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="8c3304b89b43cb000a7e238f03a5a29d898f7d9ce4e8b306ae16df08e8c3d064" exitCode=0
Dec 01 08:22:43 crc kubenswrapper[4822]: I1201 08:22:43.823798 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"8c3304b89b43cb000a7e238f03a5a29d898f7d9ce4e8b306ae16df08e8c3d064"}
Dec 01 08:22:43 crc kubenswrapper[4822]: I1201 08:22:43.824166 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302"}
Dec 01 08:22:43 crc kubenswrapper[4822]: I1201 08:22:43.824201 4822 scope.go:117] "RemoveContainer" containerID="ae62209792fd063e1530caa6fdc91a56031f7dde1b2e256e7377758391b01856"
Dec 01 08:24:42 crc kubenswrapper[4822]: I1201 08:24:42.543256 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 08:24:42 crc kubenswrapper[4822]: I1201 08:24:42.543778 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
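These liveness failures are plain HTTP GETs against the container's /health endpoint; "connection refused" simply means nothing is listening on 127.0.0.1:8798 at probe time. A minimal probe in the same spirit, noting that Kubernetes counts any HTTP status in [200, 400) as success:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe performs a single HTTP liveness check against url and reports
// success plus a short detail string, mirroring the prober's output field.
func probe(url string) (ok bool, detail string) {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		// e.g. "connect: connection refused" while the daemon is down
		return false, err.Error()
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		return true, resp.Status
	}
	return false, resp.Status
}

func main() {
	ok, detail := probe("http://127.0.0.1:8798/health")
	fmt.Println(ok, detail)
}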
Dec 01 08:25:12 crc kubenswrapper[4822]: I1201 08:25:12.542393 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 08:25:12 crc kubenswrapper[4822]: I1201 08:25:12.542955 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 08:25:42 crc kubenswrapper[4822]: I1201 08:25:42.543210 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 08:25:42 crc kubenswrapper[4822]: I1201 08:25:42.543887 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 08:25:42 crc kubenswrapper[4822]: I1201 08:25:42.543951 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64"
Dec 01 08:25:42 crc kubenswrapper[4822]: I1201 08:25:42.544939 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 08:25:42 crc kubenswrapper[4822]: I1201 08:25:42.545036 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" gracePeriod=600
Dec 01 08:25:42 crc kubenswrapper[4822]: E1201 08:25:42.686411 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507"
Dec 01 08:25:43 crc kubenswrapper[4822]: I1201 08:25:43.491674 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" exitCode=0
Dec 01 08:25:43 crc kubenswrapper[4822]: I1201 08:25:43.491762 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302"}
containerID="8c3304b89b43cb000a7e238f03a5a29d898f7d9ce4e8b306ae16df08e8c3d064" Dec 01 08:25:43 crc kubenswrapper[4822]: I1201 08:25:43.492467 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:25:43 crc kubenswrapper[4822]: E1201 08:25:43.492798 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:25:57 crc kubenswrapper[4822]: I1201 08:25:57.951642 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:25:57 crc kubenswrapper[4822]: E1201 08:25:57.952778 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:26:09 crc kubenswrapper[4822]: I1201 08:26:09.951685 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:26:09 crc kubenswrapper[4822]: E1201 08:26:09.952665 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:26:21 crc kubenswrapper[4822]: I1201 08:26:21.951642 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:26:21 crc kubenswrapper[4822]: E1201 08:26:21.952740 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:26:32 crc kubenswrapper[4822]: I1201 08:26:32.951600 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:26:32 crc kubenswrapper[4822]: E1201 08:26:32.952383 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:26:45 crc kubenswrapper[4822]: I1201 08:26:45.951482 4822 scope.go:117] "RemoveContainer" 
containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:26:45 crc kubenswrapper[4822]: E1201 08:26:45.953596 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:26:58 crc kubenswrapper[4822]: I1201 08:26:58.951652 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:26:58 crc kubenswrapper[4822]: E1201 08:26:58.952674 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:27:12 crc kubenswrapper[4822]: I1201 08:27:12.951118 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:27:12 crc kubenswrapper[4822]: E1201 08:27:12.951839 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:27:23 crc kubenswrapper[4822]: I1201 08:27:23.950420 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:27:23 crc kubenswrapper[4822]: E1201 08:27:23.951177 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:27:35 crc kubenswrapper[4822]: I1201 08:27:35.950814 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:27:35 crc kubenswrapper[4822]: E1201 08:27:35.951957 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:27:49 crc kubenswrapper[4822]: I1201 08:27:49.950695 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:27:49 crc kubenswrapper[4822]: E1201 08:27:49.951576 4822 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:28:01 crc kubenswrapper[4822]: I1201 08:28:01.952591 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:28:01 crc kubenswrapper[4822]: E1201 08:28:01.953853 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:28:15 crc kubenswrapper[4822]: I1201 08:28:15.951023 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:28:15 crc kubenswrapper[4822]: E1201 08:28:15.952309 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:28:28 crc kubenswrapper[4822]: I1201 08:28:28.950940 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:28:28 crc kubenswrapper[4822]: E1201 08:28:28.951800 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:28:43 crc kubenswrapper[4822]: I1201 08:28:43.950947 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:28:43 crc kubenswrapper[4822]: E1201 08:28:43.952086 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:28:57 crc kubenswrapper[4822]: I1201 08:28:57.951060 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:28:57 crc kubenswrapper[4822]: E1201 08:28:57.952511 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:29:09 crc kubenswrapper[4822]: I1201 08:29:09.952051 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:29:09 crc kubenswrapper[4822]: E1201 08:29:09.953338 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.338050 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-xd6t4"] Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.345238 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-xd6t4"] Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.459158 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-jk59n"] Dec 01 08:29:20 crc kubenswrapper[4822]: E1201 08:29:20.459828 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e49c3bce-716a-45f6-abb4-d0426bba56c2" containerName="extract-content" Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.459886 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e49c3bce-716a-45f6-abb4-d0426bba56c2" containerName="extract-content" Dec 01 08:29:20 crc kubenswrapper[4822]: E1201 08:29:20.459933 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e49c3bce-716a-45f6-abb4-d0426bba56c2" containerName="registry-server" Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.459946 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e49c3bce-716a-45f6-abb4-d0426bba56c2" containerName="registry-server" Dec 01 08:29:20 crc kubenswrapper[4822]: E1201 08:29:20.459975 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e49c3bce-716a-45f6-abb4-d0426bba56c2" containerName="extract-utilities" Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.459988 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="e49c3bce-716a-45f6-abb4-d0426bba56c2" containerName="extract-utilities" Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.460259 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="e49c3bce-716a-45f6-abb4-d0426bba56c2" containerName="registry-server" Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.461037 4822 util.go:30] "No sandbox for pod can be found. 
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.461037 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jk59n"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.465657 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.465890 4822 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-xwxqj"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.465898 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.470289 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.478363 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-jk59n"]
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.510694 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwdgn\" (UniqueName: \"kubernetes.io/projected/594f3841-d597-4bd3-985e-b2d63b0c5469-kube-api-access-jwdgn\") pod \"crc-storage-crc-jk59n\" (UID: \"594f3841-d597-4bd3-985e-b2d63b0c5469\") " pod="crc-storage/crc-storage-crc-jk59n"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.511183 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/594f3841-d597-4bd3-985e-b2d63b0c5469-node-mnt\") pod \"crc-storage-crc-jk59n\" (UID: \"594f3841-d597-4bd3-985e-b2d63b0c5469\") " pod="crc-storage/crc-storage-crc-jk59n"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.511502 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/594f3841-d597-4bd3-985e-b2d63b0c5469-crc-storage\") pod \"crc-storage-crc-jk59n\" (UID: \"594f3841-d597-4bd3-985e-b2d63b0c5469\") " pod="crc-storage/crc-storage-crc-jk59n"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.612833 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/594f3841-d597-4bd3-985e-b2d63b0c5469-crc-storage\") pod \"crc-storage-crc-jk59n\" (UID: \"594f3841-d597-4bd3-985e-b2d63b0c5469\") " pod="crc-storage/crc-storage-crc-jk59n"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.613014 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwdgn\" (UniqueName: \"kubernetes.io/projected/594f3841-d597-4bd3-985e-b2d63b0c5469-kube-api-access-jwdgn\") pod \"crc-storage-crc-jk59n\" (UID: \"594f3841-d597-4bd3-985e-b2d63b0c5469\") " pod="crc-storage/crc-storage-crc-jk59n"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.613078 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/594f3841-d597-4bd3-985e-b2d63b0c5469-node-mnt\") pod \"crc-storage-crc-jk59n\" (UID: \"594f3841-d597-4bd3-985e-b2d63b0c5469\") " pod="crc-storage/crc-storage-crc-jk59n"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.613452 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/594f3841-d597-4bd3-985e-b2d63b0c5469-node-mnt\") pod \"crc-storage-crc-jk59n\" (UID: \"594f3841-d597-4bd3-985e-b2d63b0c5469\") " pod="crc-storage/crc-storage-crc-jk59n"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.613904 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/594f3841-d597-4bd3-985e-b2d63b0c5469-crc-storage\") pod \"crc-storage-crc-jk59n\" (UID: \"594f3841-d597-4bd3-985e-b2d63b0c5469\") " pod="crc-storage/crc-storage-crc-jk59n"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.640536 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwdgn\" (UniqueName: \"kubernetes.io/projected/594f3841-d597-4bd3-985e-b2d63b0c5469-kube-api-access-jwdgn\") pod \"crc-storage-crc-jk59n\" (UID: \"594f3841-d597-4bd3-985e-b2d63b0c5469\") " pod="crc-storage/crc-storage-crc-jk59n"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.786532 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jk59n"
Dec 01 08:29:20 crc kubenswrapper[4822]: I1201 08:29:20.963760 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="049bc621-4bdc-4c58-9151-6506548287c1" path="/var/lib/kubelet/pods/049bc621-4bdc-4c58-9151-6506548287c1/volumes"
Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.329998 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-jk59n"]
Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.337110 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.397292 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-jk59n" event={"ID":"594f3841-d597-4bd3-985e-b2d63b0c5469","Type":"ContainerStarted","Data":"5fd8b81b61ae3b7ef2731d532bc856eae409b67e46baf9e4b444b05d75fe1f5d"}
Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.763016 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2xk8n"]
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.825481 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2xk8n"] Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.840288 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b7beb79-b421-4607-bbf5-834b0f6f7b20-catalog-content\") pod \"redhat-marketplace-2xk8n\" (UID: \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\") " pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.840483 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77xkk\" (UniqueName: \"kubernetes.io/projected/6b7beb79-b421-4607-bbf5-834b0f6f7b20-kube-api-access-77xkk\") pod \"redhat-marketplace-2xk8n\" (UID: \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\") " pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.840579 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b7beb79-b421-4607-bbf5-834b0f6f7b20-utilities\") pod \"redhat-marketplace-2xk8n\" (UID: \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\") " pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.941846 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77xkk\" (UniqueName: \"kubernetes.io/projected/6b7beb79-b421-4607-bbf5-834b0f6f7b20-kube-api-access-77xkk\") pod \"redhat-marketplace-2xk8n\" (UID: \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\") " pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.942074 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b7beb79-b421-4607-bbf5-834b0f6f7b20-utilities\") pod \"redhat-marketplace-2xk8n\" (UID: \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\") " pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.942233 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b7beb79-b421-4607-bbf5-834b0f6f7b20-catalog-content\") pod \"redhat-marketplace-2xk8n\" (UID: \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\") " pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.942835 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b7beb79-b421-4607-bbf5-834b0f6f7b20-catalog-content\") pod \"redhat-marketplace-2xk8n\" (UID: \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\") " pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.942837 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b7beb79-b421-4607-bbf5-834b0f6f7b20-utilities\") pod \"redhat-marketplace-2xk8n\" (UID: \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\") " pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:21 crc kubenswrapper[4822]: I1201 08:29:21.967963 4822 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-77xkk\" (UniqueName: \"kubernetes.io/projected/6b7beb79-b421-4607-bbf5-834b0f6f7b20-kube-api-access-77xkk\") pod \"redhat-marketplace-2xk8n\" (UID: \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\") " pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:22 crc kubenswrapper[4822]: I1201 08:29:22.095449 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:22 crc kubenswrapper[4822]: I1201 08:29:22.336821 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2xk8n"] Dec 01 08:29:22 crc kubenswrapper[4822]: W1201 08:29:22.343038 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b7beb79_b421_4607_bbf5_834b0f6f7b20.slice/crio-8198d9ed0def0dfd4367b904032ce6170b2284421bc0460235589486cedf7dd9 WatchSource:0}: Error finding container 8198d9ed0def0dfd4367b904032ce6170b2284421bc0460235589486cedf7dd9: Status 404 returned error can't find the container with id 8198d9ed0def0dfd4367b904032ce6170b2284421bc0460235589486cedf7dd9 Dec 01 08:29:22 crc kubenswrapper[4822]: I1201 08:29:22.405784 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2xk8n" event={"ID":"6b7beb79-b421-4607-bbf5-834b0f6f7b20","Type":"ContainerStarted","Data":"8198d9ed0def0dfd4367b904032ce6170b2284421bc0460235589486cedf7dd9"} Dec 01 08:29:22 crc kubenswrapper[4822]: I1201 08:29:22.407795 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-jk59n" event={"ID":"594f3841-d597-4bd3-985e-b2d63b0c5469","Type":"ContainerStarted","Data":"9294cf514719f7169e03adea9c33d534acdd9214a5697d2d66e97dff5d033631"} Dec 01 08:29:22 crc kubenswrapper[4822]: I1201 08:29:22.430383 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="crc-storage/crc-storage-crc-jk59n" podStartSLOduration=1.735623543 podStartE2EDuration="2.430361542s" podCreationTimestamp="2025-12-01 08:29:20 +0000 UTC" firstStartedPulling="2025-12-01 08:29:21.336799284 +0000 UTC m=+5916.657606980" lastFinishedPulling="2025-12-01 08:29:22.031537293 +0000 UTC m=+5917.352344979" observedRunningTime="2025-12-01 08:29:22.423487228 +0000 UTC m=+5917.744294914" watchObservedRunningTime="2025-12-01 08:29:22.430361542 +0000 UTC m=+5917.751169228" Dec 01 08:29:23 crc kubenswrapper[4822]: I1201 08:29:23.418152 4822 generic.go:334] "Generic (PLEG): container finished" podID="6b7beb79-b421-4607-bbf5-834b0f6f7b20" containerID="030d54642f3f4ba4ac214231fb4b501fdc093869cec217f75a7a236870ddf622" exitCode=0 Dec 01 08:29:23 crc kubenswrapper[4822]: I1201 08:29:23.418194 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2xk8n" event={"ID":"6b7beb79-b421-4607-bbf5-834b0f6f7b20","Type":"ContainerDied","Data":"030d54642f3f4ba4ac214231fb4b501fdc093869cec217f75a7a236870ddf622"} Dec 01 08:29:23 crc kubenswrapper[4822]: I1201 08:29:23.420736 4822 generic.go:334] "Generic (PLEG): container finished" podID="594f3841-d597-4bd3-985e-b2d63b0c5469" containerID="9294cf514719f7169e03adea9c33d534acdd9214a5697d2d66e97dff5d033631" exitCode=0 Dec 01 08:29:23 crc kubenswrapper[4822]: I1201 08:29:23.420791 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-jk59n" 
event={"ID":"594f3841-d597-4bd3-985e-b2d63b0c5469","Type":"ContainerDied","Data":"9294cf514719f7169e03adea9c33d534acdd9214a5697d2d66e97dff5d033631"} Dec 01 08:29:23 crc kubenswrapper[4822]: I1201 08:29:23.950706 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:29:23 crc kubenswrapper[4822]: E1201 08:29:23.950987 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:29:24 crc kubenswrapper[4822]: I1201 08:29:24.866583 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jk59n" Dec 01 08:29:24 crc kubenswrapper[4822]: I1201 08:29:24.892336 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwdgn\" (UniqueName: \"kubernetes.io/projected/594f3841-d597-4bd3-985e-b2d63b0c5469-kube-api-access-jwdgn\") pod \"594f3841-d597-4bd3-985e-b2d63b0c5469\" (UID: \"594f3841-d597-4bd3-985e-b2d63b0c5469\") " Dec 01 08:29:24 crc kubenswrapper[4822]: I1201 08:29:24.892608 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/594f3841-d597-4bd3-985e-b2d63b0c5469-crc-storage\") pod \"594f3841-d597-4bd3-985e-b2d63b0c5469\" (UID: \"594f3841-d597-4bd3-985e-b2d63b0c5469\") " Dec 01 08:29:24 crc kubenswrapper[4822]: I1201 08:29:24.892631 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/594f3841-d597-4bd3-985e-b2d63b0c5469-node-mnt\") pod \"594f3841-d597-4bd3-985e-b2d63b0c5469\" (UID: \"594f3841-d597-4bd3-985e-b2d63b0c5469\") " Dec 01 08:29:24 crc kubenswrapper[4822]: I1201 08:29:24.892837 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/594f3841-d597-4bd3-985e-b2d63b0c5469-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "594f3841-d597-4bd3-985e-b2d63b0c5469" (UID: "594f3841-d597-4bd3-985e-b2d63b0c5469"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 08:29:24 crc kubenswrapper[4822]: I1201 08:29:24.893075 4822 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/594f3841-d597-4bd3-985e-b2d63b0c5469-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 01 08:29:24 crc kubenswrapper[4822]: I1201 08:29:24.898819 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/594f3841-d597-4bd3-985e-b2d63b0c5469-kube-api-access-jwdgn" (OuterVolumeSpecName: "kube-api-access-jwdgn") pod "594f3841-d597-4bd3-985e-b2d63b0c5469" (UID: "594f3841-d597-4bd3-985e-b2d63b0c5469"). InnerVolumeSpecName "kube-api-access-jwdgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:29:24 crc kubenswrapper[4822]: I1201 08:29:24.921258 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/594f3841-d597-4bd3-985e-b2d63b0c5469-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "594f3841-d597-4bd3-985e-b2d63b0c5469" (UID: "594f3841-d597-4bd3-985e-b2d63b0c5469"). 
InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 08:29:24 crc kubenswrapper[4822]: I1201 08:29:24.995086 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwdgn\" (UniqueName: \"kubernetes.io/projected/594f3841-d597-4bd3-985e-b2d63b0c5469-kube-api-access-jwdgn\") on node \"crc\" DevicePath \"\"" Dec 01 08:29:24 crc kubenswrapper[4822]: I1201 08:29:24.995722 4822 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/594f3841-d597-4bd3-985e-b2d63b0c5469-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 01 08:29:25 crc kubenswrapper[4822]: I1201 08:29:25.443761 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-jk59n" event={"ID":"594f3841-d597-4bd3-985e-b2d63b0c5469","Type":"ContainerDied","Data":"5fd8b81b61ae3b7ef2731d532bc856eae409b67e46baf9e4b444b05d75fe1f5d"} Dec 01 08:29:25 crc kubenswrapper[4822]: I1201 08:29:25.444049 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5fd8b81b61ae3b7ef2731d532bc856eae409b67e46baf9e4b444b05d75fe1f5d" Dec 01 08:29:25 crc kubenswrapper[4822]: I1201 08:29:25.443850 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-jk59n" Dec 01 08:29:25 crc kubenswrapper[4822]: I1201 08:29:25.446806 4822 generic.go:334] "Generic (PLEG): container finished" podID="6b7beb79-b421-4607-bbf5-834b0f6f7b20" containerID="926715fc37efc376d30d12d8e3493c6018c7d1e1c6da3896a1832e1080c107b0" exitCode=0 Dec 01 08:29:25 crc kubenswrapper[4822]: I1201 08:29:25.446861 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2xk8n" event={"ID":"6b7beb79-b421-4607-bbf5-834b0f6f7b20","Type":"ContainerDied","Data":"926715fc37efc376d30d12d8e3493c6018c7d1e1c6da3896a1832e1080c107b0"} Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.456337 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2xk8n" event={"ID":"6b7beb79-b421-4607-bbf5-834b0f6f7b20","Type":"ContainerStarted","Data":"426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9"} Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.492024 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2xk8n" podStartSLOduration=2.98056221 podStartE2EDuration="5.492000213s" podCreationTimestamp="2025-12-01 08:29:21 +0000 UTC" firstStartedPulling="2025-12-01 08:29:23.422788041 +0000 UTC m=+5918.743595747" lastFinishedPulling="2025-12-01 08:29:25.934226054 +0000 UTC m=+5921.255033750" observedRunningTime="2025-12-01 08:29:26.491118488 +0000 UTC m=+5921.811926174" watchObservedRunningTime="2025-12-01 08:29:26.492000213 +0000 UTC m=+5921.812807939" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.552205 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-jk59n"] Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.559112 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-jk59n"] Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.664484 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-rwq5g"] Dec 01 08:29:26 crc kubenswrapper[4822]: E1201 08:29:26.664887 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="594f3841-d597-4bd3-985e-b2d63b0c5469" containerName="storage" Dec 01 
08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.664907 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="594f3841-d597-4bd3-985e-b2d63b0c5469" containerName="storage" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.665177 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="594f3841-d597-4bd3-985e-b2d63b0c5469" containerName="storage" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.665773 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.669576 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.669731 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.669834 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.670030 4822 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-xwxqj" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.676069 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-rwq5g"] Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.735922 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x688\" (UniqueName: \"kubernetes.io/projected/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-kube-api-access-4x688\") pod \"crc-storage-crc-rwq5g\" (UID: \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\") " pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.735980 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-crc-storage\") pod \"crc-storage-crc-rwq5g\" (UID: \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\") " pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.736210 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-node-mnt\") pod \"crc-storage-crc-rwq5g\" (UID: \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\") " pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.837535 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-node-mnt\") pod \"crc-storage-crc-rwq5g\" (UID: \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\") " pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.837881 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x688\" (UniqueName: \"kubernetes.io/projected/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-kube-api-access-4x688\") pod \"crc-storage-crc-rwq5g\" (UID: \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\") " pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.837973 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: 
\"kubernetes.io/configmap/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-crc-storage\") pod \"crc-storage-crc-rwq5g\" (UID: \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\") " pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.837870 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-node-mnt\") pod \"crc-storage-crc-rwq5g\" (UID: \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\") " pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.838658 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-crc-storage\") pod \"crc-storage-crc-rwq5g\" (UID: \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\") " pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.857992 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x688\" (UniqueName: \"kubernetes.io/projected/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-kube-api-access-4x688\") pod \"crc-storage-crc-rwq5g\" (UID: \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\") " pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.958309 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="594f3841-d597-4bd3-985e-b2d63b0c5469" path="/var/lib/kubelet/pods/594f3841-d597-4bd3-985e-b2d63b0c5469/volumes" Dec 01 08:29:26 crc kubenswrapper[4822]: I1201 08:29:26.992402 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:27 crc kubenswrapper[4822]: I1201 08:29:27.208148 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-rwq5g"] Dec 01 08:29:27 crc kubenswrapper[4822]: W1201 08:29:27.217671 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5e9a73a_4eb8_4a81_9aee_28bde358b13a.slice/crio-029931a63e7053dc9d3ace5e89e3cf1fbebf61f239baf54f195f010971cde27c WatchSource:0}: Error finding container 029931a63e7053dc9d3ace5e89e3cf1fbebf61f239baf54f195f010971cde27c: Status 404 returned error can't find the container with id 029931a63e7053dc9d3ace5e89e3cf1fbebf61f239baf54f195f010971cde27c Dec 01 08:29:27 crc kubenswrapper[4822]: I1201 08:29:27.464756 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-rwq5g" event={"ID":"d5e9a73a-4eb8-4a81-9aee-28bde358b13a","Type":"ContainerStarted","Data":"029931a63e7053dc9d3ace5e89e3cf1fbebf61f239baf54f195f010971cde27c"} Dec 01 08:29:28 crc kubenswrapper[4822]: I1201 08:29:28.473504 4822 generic.go:334] "Generic (PLEG): container finished" podID="d5e9a73a-4eb8-4a81-9aee-28bde358b13a" containerID="938c9d6f53a87433cfa05b460dee65434e12ae3c2a98c8b18396655c54a94f0b" exitCode=0 Dec 01 08:29:28 crc kubenswrapper[4822]: I1201 08:29:28.473591 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-rwq5g" event={"ID":"d5e9a73a-4eb8-4a81-9aee-28bde358b13a","Type":"ContainerDied","Data":"938c9d6f53a87433cfa05b460dee65434e12ae3c2a98c8b18396655c54a94f0b"} Dec 01 08:29:29 crc kubenswrapper[4822]: I1201 08:29:29.741420 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:29 crc kubenswrapper[4822]: I1201 08:29:29.779806 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-node-mnt\") pod \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\" (UID: \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\") " Dec 01 08:29:29 crc kubenswrapper[4822]: I1201 08:29:29.780207 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "d5e9a73a-4eb8-4a81-9aee-28bde358b13a" (UID: "d5e9a73a-4eb8-4a81-9aee-28bde358b13a"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 08:29:29 crc kubenswrapper[4822]: I1201 08:29:29.780473 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-crc-storage\") pod \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\" (UID: \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\") " Dec 01 08:29:29 crc kubenswrapper[4822]: I1201 08:29:29.780541 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4x688\" (UniqueName: \"kubernetes.io/projected/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-kube-api-access-4x688\") pod \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\" (UID: \"d5e9a73a-4eb8-4a81-9aee-28bde358b13a\") " Dec 01 08:29:29 crc kubenswrapper[4822]: I1201 08:29:29.782171 4822 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 01 08:29:29 crc kubenswrapper[4822]: I1201 08:29:29.795112 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-kube-api-access-4x688" (OuterVolumeSpecName: "kube-api-access-4x688") pod "d5e9a73a-4eb8-4a81-9aee-28bde358b13a" (UID: "d5e9a73a-4eb8-4a81-9aee-28bde358b13a"). InnerVolumeSpecName "kube-api-access-4x688". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:29:29 crc kubenswrapper[4822]: I1201 08:29:29.815381 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "d5e9a73a-4eb8-4a81-9aee-28bde358b13a" (UID: "d5e9a73a-4eb8-4a81-9aee-28bde358b13a"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 08:29:29 crc kubenswrapper[4822]: I1201 08:29:29.883955 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4x688\" (UniqueName: \"kubernetes.io/projected/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-kube-api-access-4x688\") on node \"crc\" DevicePath \"\"" Dec 01 08:29:29 crc kubenswrapper[4822]: I1201 08:29:29.884205 4822 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d5e9a73a-4eb8-4a81-9aee-28bde358b13a-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 01 08:29:30 crc kubenswrapper[4822]: I1201 08:29:30.500034 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-rwq5g" event={"ID":"d5e9a73a-4eb8-4a81-9aee-28bde358b13a","Type":"ContainerDied","Data":"029931a63e7053dc9d3ace5e89e3cf1fbebf61f239baf54f195f010971cde27c"} Dec 01 08:29:30 crc kubenswrapper[4822]: I1201 08:29:30.500090 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="029931a63e7053dc9d3ace5e89e3cf1fbebf61f239baf54f195f010971cde27c" Dec 01 08:29:30 crc kubenswrapper[4822]: I1201 08:29:30.500168 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-rwq5g" Dec 01 08:29:32 crc kubenswrapper[4822]: I1201 08:29:32.095806 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:32 crc kubenswrapper[4822]: I1201 08:29:32.096060 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:32 crc kubenswrapper[4822]: I1201 08:29:32.156172 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:32 crc kubenswrapper[4822]: I1201 08:29:32.565999 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:32 crc kubenswrapper[4822]: I1201 08:29:32.622768 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2xk8n"] Dec 01 08:29:34 crc kubenswrapper[4822]: I1201 08:29:34.532205 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2xk8n" podUID="6b7beb79-b421-4607-bbf5-834b0f6f7b20" containerName="registry-server" containerID="cri-o://426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9" gracePeriod=2 Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.485471 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.539748 4822 generic.go:334] "Generic (PLEG): container finished" podID="6b7beb79-b421-4607-bbf5-834b0f6f7b20" containerID="426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9" exitCode=0 Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.539788 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2xk8n" event={"ID":"6b7beb79-b421-4607-bbf5-834b0f6f7b20","Type":"ContainerDied","Data":"426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9"} Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.539813 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2xk8n" event={"ID":"6b7beb79-b421-4607-bbf5-834b0f6f7b20","Type":"ContainerDied","Data":"8198d9ed0def0dfd4367b904032ce6170b2284421bc0460235589486cedf7dd9"} Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.539830 4822 scope.go:117] "RemoveContainer" containerID="426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.539943 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2xk8n" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.560937 4822 scope.go:117] "RemoveContainer" containerID="926715fc37efc376d30d12d8e3493c6018c7d1e1c6da3896a1832e1080c107b0" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.561198 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b7beb79-b421-4607-bbf5-834b0f6f7b20-catalog-content\") pod \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\" (UID: \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\") " Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.561298 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77xkk\" (UniqueName: \"kubernetes.io/projected/6b7beb79-b421-4607-bbf5-834b0f6f7b20-kube-api-access-77xkk\") pod \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\" (UID: \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\") " Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.561342 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b7beb79-b421-4607-bbf5-834b0f6f7b20-utilities\") pod \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\" (UID: \"6b7beb79-b421-4607-bbf5-834b0f6f7b20\") " Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.562425 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b7beb79-b421-4607-bbf5-834b0f6f7b20-utilities" (OuterVolumeSpecName: "utilities") pod "6b7beb79-b421-4607-bbf5-834b0f6f7b20" (UID: "6b7beb79-b421-4607-bbf5-834b0f6f7b20"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.568950 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b7beb79-b421-4607-bbf5-834b0f6f7b20-kube-api-access-77xkk" (OuterVolumeSpecName: "kube-api-access-77xkk") pod "6b7beb79-b421-4607-bbf5-834b0f6f7b20" (UID: "6b7beb79-b421-4607-bbf5-834b0f6f7b20"). InnerVolumeSpecName "kube-api-access-77xkk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.588774 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b7beb79-b421-4607-bbf5-834b0f6f7b20-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6b7beb79-b421-4607-bbf5-834b0f6f7b20" (UID: "6b7beb79-b421-4607-bbf5-834b0f6f7b20"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.608998 4822 scope.go:117] "RemoveContainer" containerID="030d54642f3f4ba4ac214231fb4b501fdc093869cec217f75a7a236870ddf622" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.634365 4822 scope.go:117] "RemoveContainer" containerID="426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9" Dec 01 08:29:35 crc kubenswrapper[4822]: E1201 08:29:35.635013 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9\": container with ID starting with 426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9 not found: ID does not exist" containerID="426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.635052 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9"} err="failed to get container status \"426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9\": rpc error: code = NotFound desc = could not find container \"426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9\": container with ID starting with 426e81415b0b1b125210e084715da1274b504a18488ba48f59a5f68ef6a6a6d9 not found: ID does not exist" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.635092 4822 scope.go:117] "RemoveContainer" containerID="926715fc37efc376d30d12d8e3493c6018c7d1e1c6da3896a1832e1080c107b0" Dec 01 08:29:35 crc kubenswrapper[4822]: E1201 08:29:35.635419 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"926715fc37efc376d30d12d8e3493c6018c7d1e1c6da3896a1832e1080c107b0\": container with ID starting with 926715fc37efc376d30d12d8e3493c6018c7d1e1c6da3896a1832e1080c107b0 not found: ID does not exist" containerID="926715fc37efc376d30d12d8e3493c6018c7d1e1c6da3896a1832e1080c107b0" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.635444 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"926715fc37efc376d30d12d8e3493c6018c7d1e1c6da3896a1832e1080c107b0"} err="failed to get container status \"926715fc37efc376d30d12d8e3493c6018c7d1e1c6da3896a1832e1080c107b0\": rpc error: code = NotFound desc = could not find container \"926715fc37efc376d30d12d8e3493c6018c7d1e1c6da3896a1832e1080c107b0\": container with ID starting with 926715fc37efc376d30d12d8e3493c6018c7d1e1c6da3896a1832e1080c107b0 not found: ID does not exist" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.635460 4822 scope.go:117] "RemoveContainer" containerID="030d54642f3f4ba4ac214231fb4b501fdc093869cec217f75a7a236870ddf622" Dec 01 08:29:35 crc kubenswrapper[4822]: E1201 08:29:35.635822 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"030d54642f3f4ba4ac214231fb4b501fdc093869cec217f75a7a236870ddf622\": container with ID starting with 030d54642f3f4ba4ac214231fb4b501fdc093869cec217f75a7a236870ddf622 not found: ID does not exist" containerID="030d54642f3f4ba4ac214231fb4b501fdc093869cec217f75a7a236870ddf622" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.635848 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"030d54642f3f4ba4ac214231fb4b501fdc093869cec217f75a7a236870ddf622"} err="failed to get container status \"030d54642f3f4ba4ac214231fb4b501fdc093869cec217f75a7a236870ddf622\": rpc error: code = NotFound desc = could not find container \"030d54642f3f4ba4ac214231fb4b501fdc093869cec217f75a7a236870ddf622\": container with ID starting with 030d54642f3f4ba4ac214231fb4b501fdc093869cec217f75a7a236870ddf622 not found: ID does not exist" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.662609 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b7beb79-b421-4607-bbf5-834b0f6f7b20-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.662655 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b7beb79-b421-4607-bbf5-834b0f6f7b20-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.662667 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77xkk\" (UniqueName: \"kubernetes.io/projected/6b7beb79-b421-4607-bbf5-834b0f6f7b20-kube-api-access-77xkk\") on node \"crc\" DevicePath \"\"" Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.874624 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2xk8n"] Dec 01 08:29:35 crc kubenswrapper[4822]: I1201 08:29:35.883814 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2xk8n"] Dec 01 08:29:36 crc kubenswrapper[4822]: I1201 08:29:36.950960 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:29:36 crc kubenswrapper[4822]: E1201 08:29:36.951346 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:29:36 crc kubenswrapper[4822]: I1201 08:29:36.967181 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b7beb79-b421-4607-bbf5-834b0f6f7b20" path="/var/lib/kubelet/pods/6b7beb79-b421-4607-bbf5-834b0f6f7b20/volumes" Dec 01 08:29:51 crc kubenswrapper[4822]: I1201 08:29:51.951067 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:29:51 crc kubenswrapper[4822]: E1201 08:29:51.951908 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:29:51 crc kubenswrapper[4822]: I1201 08:29:51.989322 4822 scope.go:117] "RemoveContainer" containerID="4aa300e393bbab05fdf787c7f6c70c5f66f11f0765b53911efc5d0f0267cd733" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.147434 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb"] Dec 01 08:30:00 crc kubenswrapper[4822]: E1201 08:30:00.148685 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b7beb79-b421-4607-bbf5-834b0f6f7b20" containerName="registry-server" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.148708 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b7beb79-b421-4607-bbf5-834b0f6f7b20" containerName="registry-server" Dec 01 08:30:00 crc kubenswrapper[4822]: E1201 08:30:00.148740 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b7beb79-b421-4607-bbf5-834b0f6f7b20" containerName="extract-content" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.148751 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b7beb79-b421-4607-bbf5-834b0f6f7b20" containerName="extract-content" Dec 01 08:30:00 crc kubenswrapper[4822]: E1201 08:30:00.148782 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5e9a73a-4eb8-4a81-9aee-28bde358b13a" containerName="storage" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.148797 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5e9a73a-4eb8-4a81-9aee-28bde358b13a" containerName="storage" Dec 01 08:30:00 crc kubenswrapper[4822]: E1201 08:30:00.148811 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b7beb79-b421-4607-bbf5-834b0f6f7b20" containerName="extract-utilities" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.148822 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b7beb79-b421-4607-bbf5-834b0f6f7b20" containerName="extract-utilities" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.149051 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b7beb79-b421-4607-bbf5-834b0f6f7b20" containerName="registry-server" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.149086 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5e9a73a-4eb8-4a81-9aee-28bde358b13a" containerName="storage" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.149880 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.152672 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.152717 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.161429 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb"] Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.230717 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c6d2a869-759d-4a21-9f52-7e92309f311a-secret-volume\") pod \"collect-profiles-29409630-zk7kb\" (UID: \"c6d2a869-759d-4a21-9f52-7e92309f311a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.230789 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c6d2a869-759d-4a21-9f52-7e92309f311a-config-volume\") pod \"collect-profiles-29409630-zk7kb\" (UID: \"c6d2a869-759d-4a21-9f52-7e92309f311a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.230935 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcl5h\" (UniqueName: \"kubernetes.io/projected/c6d2a869-759d-4a21-9f52-7e92309f311a-kube-api-access-tcl5h\") pod \"collect-profiles-29409630-zk7kb\" (UID: \"c6d2a869-759d-4a21-9f52-7e92309f311a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.332523 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcl5h\" (UniqueName: \"kubernetes.io/projected/c6d2a869-759d-4a21-9f52-7e92309f311a-kube-api-access-tcl5h\") pod \"collect-profiles-29409630-zk7kb\" (UID: \"c6d2a869-759d-4a21-9f52-7e92309f311a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.332882 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c6d2a869-759d-4a21-9f52-7e92309f311a-secret-volume\") pod \"collect-profiles-29409630-zk7kb\" (UID: \"c6d2a869-759d-4a21-9f52-7e92309f311a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.333063 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c6d2a869-759d-4a21-9f52-7e92309f311a-config-volume\") pod \"collect-profiles-29409630-zk7kb\" (UID: \"c6d2a869-759d-4a21-9f52-7e92309f311a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.334246 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c6d2a869-759d-4a21-9f52-7e92309f311a-config-volume\") pod 
\"collect-profiles-29409630-zk7kb\" (UID: \"c6d2a869-759d-4a21-9f52-7e92309f311a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.339901 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c6d2a869-759d-4a21-9f52-7e92309f311a-secret-volume\") pod \"collect-profiles-29409630-zk7kb\" (UID: \"c6d2a869-759d-4a21-9f52-7e92309f311a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.349094 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcl5h\" (UniqueName: \"kubernetes.io/projected/c6d2a869-759d-4a21-9f52-7e92309f311a-kube-api-access-tcl5h\") pod \"collect-profiles-29409630-zk7kb\" (UID: \"c6d2a869-759d-4a21-9f52-7e92309f311a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.470381 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.716998 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb"] Dec 01 08:30:00 crc kubenswrapper[4822]: W1201 08:30:00.726351 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc6d2a869_759d_4a21_9f52_7e92309f311a.slice/crio-2576a3050cff68b9cc2704732a498c33693655bc70f817d138ca70b4507e59fd WatchSource:0}: Error finding container 2576a3050cff68b9cc2704732a498c33693655bc70f817d138ca70b4507e59fd: Status 404 returned error can't find the container with id 2576a3050cff68b9cc2704732a498c33693655bc70f817d138ca70b4507e59fd Dec 01 08:30:00 crc kubenswrapper[4822]: I1201 08:30:00.755860 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" event={"ID":"c6d2a869-759d-4a21-9f52-7e92309f311a","Type":"ContainerStarted","Data":"2576a3050cff68b9cc2704732a498c33693655bc70f817d138ca70b4507e59fd"} Dec 01 08:30:01 crc kubenswrapper[4822]: I1201 08:30:01.768907 4822 generic.go:334] "Generic (PLEG): container finished" podID="c6d2a869-759d-4a21-9f52-7e92309f311a" containerID="aef5afe7dfbef21acc2ccfc49a504e6ddcb3da10b262948e7140eb900ddd9492" exitCode=0 Dec 01 08:30:01 crc kubenswrapper[4822]: I1201 08:30:01.769027 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" event={"ID":"c6d2a869-759d-4a21-9f52-7e92309f311a","Type":"ContainerDied","Data":"aef5afe7dfbef21acc2ccfc49a504e6ddcb3da10b262948e7140eb900ddd9492"} Dec 01 08:30:02 crc kubenswrapper[4822]: I1201 08:30:02.951017 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:30:02 crc kubenswrapper[4822]: E1201 08:30:02.951339 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" 
podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.099446 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.281219 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c6d2a869-759d-4a21-9f52-7e92309f311a-secret-volume\") pod \"c6d2a869-759d-4a21-9f52-7e92309f311a\" (UID: \"c6d2a869-759d-4a21-9f52-7e92309f311a\") " Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.281328 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcl5h\" (UniqueName: \"kubernetes.io/projected/c6d2a869-759d-4a21-9f52-7e92309f311a-kube-api-access-tcl5h\") pod \"c6d2a869-759d-4a21-9f52-7e92309f311a\" (UID: \"c6d2a869-759d-4a21-9f52-7e92309f311a\") " Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.281479 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c6d2a869-759d-4a21-9f52-7e92309f311a-config-volume\") pod \"c6d2a869-759d-4a21-9f52-7e92309f311a\" (UID: \"c6d2a869-759d-4a21-9f52-7e92309f311a\") " Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.281993 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6d2a869-759d-4a21-9f52-7e92309f311a-config-volume" (OuterVolumeSpecName: "config-volume") pod "c6d2a869-759d-4a21-9f52-7e92309f311a" (UID: "c6d2a869-759d-4a21-9f52-7e92309f311a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.286255 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6d2a869-759d-4a21-9f52-7e92309f311a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c6d2a869-759d-4a21-9f52-7e92309f311a" (UID: "c6d2a869-759d-4a21-9f52-7e92309f311a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.287745 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6d2a869-759d-4a21-9f52-7e92309f311a-kube-api-access-tcl5h" (OuterVolumeSpecName: "kube-api-access-tcl5h") pod "c6d2a869-759d-4a21-9f52-7e92309f311a" (UID: "c6d2a869-759d-4a21-9f52-7e92309f311a"). InnerVolumeSpecName "kube-api-access-tcl5h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.384199 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcl5h\" (UniqueName: \"kubernetes.io/projected/c6d2a869-759d-4a21-9f52-7e92309f311a-kube-api-access-tcl5h\") on node \"crc\" DevicePath \"\"" Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.384232 4822 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c6d2a869-759d-4a21-9f52-7e92309f311a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.384241 4822 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c6d2a869-759d-4a21-9f52-7e92309f311a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.790303 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" event={"ID":"c6d2a869-759d-4a21-9f52-7e92309f311a","Type":"ContainerDied","Data":"2576a3050cff68b9cc2704732a498c33693655bc70f817d138ca70b4507e59fd"} Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.790367 4822 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2576a3050cff68b9cc2704732a498c33693655bc70f817d138ca70b4507e59fd" Dec 01 08:30:03 crc kubenswrapper[4822]: I1201 08:30:03.790420 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409630-zk7kb" Dec 01 08:30:04 crc kubenswrapper[4822]: I1201 08:30:04.211495 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb"] Dec 01 08:30:04 crc kubenswrapper[4822]: I1201 08:30:04.219832 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409585-gzhvb"] Dec 01 08:30:04 crc kubenswrapper[4822]: I1201 08:30:04.962020 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="105964ba-59ad-4401-a3ff-9f0ac3bb7838" path="/var/lib/kubelet/pods/105964ba-59ad-4401-a3ff-9f0ac3bb7838/volumes" Dec 01 08:30:13 crc kubenswrapper[4822]: I1201 08:30:13.951967 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:30:13 crc kubenswrapper[4822]: E1201 08:30:13.952673 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:30:28 crc kubenswrapper[4822]: I1201 08:30:28.950843 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:30:28 crc kubenswrapper[4822]: E1201 08:30:28.952509 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.513298 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wt5c2/must-gather-ksz2h"] Dec 01 08:30:37 crc kubenswrapper[4822]: E1201 08:30:37.514066 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6d2a869-759d-4a21-9f52-7e92309f311a" containerName="collect-profiles" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.514082 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6d2a869-759d-4a21-9f52-7e92309f311a" containerName="collect-profiles" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.514243 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6d2a869-759d-4a21-9f52-7e92309f311a" containerName="collect-profiles" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.514989 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wt5c2/must-gather-ksz2h" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.517807 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wt5c2"/"openshift-service-ca.crt" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.517830 4822 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wt5c2"/"kube-root-ca.crt" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.518004 4822 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-wt5c2"/"default-dockercfg-qjq84" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.525721 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wt5c2/must-gather-ksz2h"] Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.630356 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-927cb\" (UniqueName: \"kubernetes.io/projected/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0-kube-api-access-927cb\") pod \"must-gather-ksz2h\" (UID: \"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0\") " pod="openshift-must-gather-wt5c2/must-gather-ksz2h" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.630471 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0-must-gather-output\") pod \"must-gather-ksz2h\" (UID: \"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0\") " pod="openshift-must-gather-wt5c2/must-gather-ksz2h" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.731527 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0-must-gather-output\") pod \"must-gather-ksz2h\" (UID: \"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0\") " pod="openshift-must-gather-wt5c2/must-gather-ksz2h" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.731935 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-927cb\" (UniqueName: \"kubernetes.io/projected/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0-kube-api-access-927cb\") pod \"must-gather-ksz2h\" (UID: \"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0\") " pod="openshift-must-gather-wt5c2/must-gather-ksz2h" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.732204 4822 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0-must-gather-output\") pod \"must-gather-ksz2h\" (UID: \"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0\") " pod="openshift-must-gather-wt5c2/must-gather-ksz2h" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.768074 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-927cb\" (UniqueName: \"kubernetes.io/projected/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0-kube-api-access-927cb\") pod \"must-gather-ksz2h\" (UID: \"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0\") " pod="openshift-must-gather-wt5c2/must-gather-ksz2h" Dec 01 08:30:37 crc kubenswrapper[4822]: I1201 08:30:37.839302 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wt5c2/must-gather-ksz2h" Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.157369 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wt5c2/must-gather-ksz2h"] Dec 01 08:30:38 crc kubenswrapper[4822]: W1201 08:30:38.161392 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d03fdc4_435a_4d0f_99e6_92cef7c85ae0.slice/crio-8eb0e965dea094a6346badb58c4aa954c21250fe4754d3805f9e5d8d5891a3fc WatchSource:0}: Error finding container 8eb0e965dea094a6346badb58c4aa954c21250fe4754d3805f9e5d8d5891a3fc: Status 404 returned error can't find the container with id 8eb0e965dea094a6346badb58c4aa954c21250fe4754d3805f9e5d8d5891a3fc Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.839837 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9rgc8"] Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.841662 4822 util.go:30] "No sandbox for pod can be found. 
Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.841662 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.848312 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-utilities\") pod \"certified-operators-9rgc8\" (UID: \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\") " pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.848409 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-catalog-content\") pod \"certified-operators-9rgc8\" (UID: \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\") " pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.848607 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcb5n\" (UniqueName: \"kubernetes.io/projected/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-kube-api-access-xcb5n\") pod \"certified-operators-9rgc8\" (UID: \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\") " pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.862641 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9rgc8"]
Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.949869 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-utilities\") pod \"certified-operators-9rgc8\" (UID: \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\") " pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.949935 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-catalog-content\") pod \"certified-operators-9rgc8\" (UID: \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\") " pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.950023 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcb5n\" (UniqueName: \"kubernetes.io/projected/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-kube-api-access-xcb5n\") pod \"certified-operators-9rgc8\" (UID: \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\") " pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.950821 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-utilities\") pod \"certified-operators-9rgc8\" (UID: \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\") " pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.951212 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-catalog-content\") pod \"certified-operators-9rgc8\" (UID: \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\") " pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:38 crc kubenswrapper[4822]: I1201 08:30:38.980385 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcb5n\" (UniqueName: \"kubernetes.io/projected/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-kube-api-access-xcb5n\") pod \"certified-operators-9rgc8\" (UID: \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\") " pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:39 crc kubenswrapper[4822]: I1201 08:30:39.068352 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wt5c2/must-gather-ksz2h" event={"ID":"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0","Type":"ContainerStarted","Data":"8eb0e965dea094a6346badb58c4aa954c21250fe4754d3805f9e5d8d5891a3fc"}
Dec 01 08:30:39 crc kubenswrapper[4822]: I1201 08:30:39.173316 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:39 crc kubenswrapper[4822]: I1201 08:30:39.445331 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9rgc8"]
Dec 01 08:30:39 crc kubenswrapper[4822]: W1201 08:30:39.469166 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfcf960a5_7cc4_4ed2_8bcb_2c64cd5238c1.slice/crio-9788d40b8206f24ea72c2a7d72188bc68389bf7b3787fcdbb46dfab6cd4f7649 WatchSource:0}: Error finding container 9788d40b8206f24ea72c2a7d72188bc68389bf7b3787fcdbb46dfab6cd4f7649: Status 404 returned error can't find the container with id 9788d40b8206f24ea72c2a7d72188bc68389bf7b3787fcdbb46dfab6cd4f7649
Dec 01 08:30:39 crc kubenswrapper[4822]: I1201 08:30:39.951304 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302"
Dec 01 08:30:39 crc kubenswrapper[4822]: E1201 08:30:39.951940 4822 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2cz64_openshift-machine-config-operator(a6c6a838-3829-4058-aa59-1302d07e4507)\"" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507"
Dec 01 08:30:40 crc kubenswrapper[4822]: I1201 08:30:40.078302 4822 generic.go:334] "Generic (PLEG): container finished" podID="fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" containerID="4f59262214c8d07eb67a149952dfcb6650ba66471400bc37a1b443ecae56d7c0" exitCode=0
Dec 01 08:30:40 crc kubenswrapper[4822]: I1201 08:30:40.078364 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rgc8" event={"ID":"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1","Type":"ContainerDied","Data":"4f59262214c8d07eb67a149952dfcb6650ba66471400bc37a1b443ecae56d7c0"}
Dec 01 08:30:40 crc kubenswrapper[4822]: I1201 08:30:40.078403 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rgc8" event={"ID":"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1","Type":"ContainerStarted","Data":"9788d40b8206f24ea72c2a7d72188bc68389bf7b3787fcdbb46dfab6cd4f7649"}
Dec 01 08:30:43 crc kubenswrapper[4822]: I1201 08:30:43.098046 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wt5c2/must-gather-ksz2h" event={"ID":"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0","Type":"ContainerStarted","Data":"0f8229835e2cb825778e8df659aeeaeb67980401fcc870e11e7c2f4ccdc4af8d"}
Dec 01 08:30:43 crc kubenswrapper[4822]: I1201 08:30:43.098395 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wt5c2/must-gather-ksz2h" event={"ID":"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0","Type":"ContainerStarted","Data":"997b1611ad65124e039b824655242f4c2170ad2335a3fee2713edd51b9ec6982"}
Dec 01 08:30:43 crc kubenswrapper[4822]: I1201 08:30:43.100161 4822 generic.go:334] "Generic (PLEG): container finished" podID="fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" containerID="4e839090275ce6de6074f045cedb80c910e5c27e67c2fe99656b4ef1995f823a" exitCode=0
Dec 01 08:30:43 crc kubenswrapper[4822]: I1201 08:30:43.100204 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rgc8" event={"ID":"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1","Type":"ContainerDied","Data":"4e839090275ce6de6074f045cedb80c910e5c27e67c2fe99656b4ef1995f823a"}
Dec 01 08:30:43 crc kubenswrapper[4822]: I1201 08:30:43.117673 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wt5c2/must-gather-ksz2h" podStartSLOduration=1.979912191 podStartE2EDuration="6.117657585s" podCreationTimestamp="2025-12-01 08:30:37 +0000 UTC" firstStartedPulling="2025-12-01 08:30:38.163600817 +0000 UTC m=+5993.484408523" lastFinishedPulling="2025-12-01 08:30:42.301346231 +0000 UTC m=+5997.622153917" observedRunningTime="2025-12-01 08:30:43.11500089 +0000 UTC m=+5998.435808576" watchObservedRunningTime="2025-12-01 08:30:43.117657585 +0000 UTC m=+5998.438465271"
Dec 01 08:30:44 crc kubenswrapper[4822]: I1201 08:30:44.116686 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rgc8" event={"ID":"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1","Type":"ContainerStarted","Data":"f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb"}
Dec 01 08:30:44 crc kubenswrapper[4822]: I1201 08:30:44.142980 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9rgc8" podStartSLOduration=2.3611151550000002 podStartE2EDuration="6.142954549s" podCreationTimestamp="2025-12-01 08:30:38 +0000 UTC" firstStartedPulling="2025-12-01 08:30:40.080200388 +0000 UTC m=+5995.401008084" lastFinishedPulling="2025-12-01 08:30:43.862039792 +0000 UTC m=+5999.182847478" observedRunningTime="2025-12-01 08:30:44.136029704 +0000 UTC m=+5999.456837390" watchObservedRunningTime="2025-12-01 08:30:44.142954549 +0000 UTC m=+5999.463762235"
Dec 01 08:30:49 crc kubenswrapper[4822]: I1201 08:30:49.174273 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:49 crc kubenswrapper[4822]: I1201 08:30:49.174860 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:49 crc kubenswrapper[4822]: I1201 08:30:49.223225 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:50 crc kubenswrapper[4822]: I1201 08:30:50.192810 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:50 crc kubenswrapper[4822]: I1201 08:30:50.236685 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9rgc8"]
Dec 01 08:30:52 crc kubenswrapper[4822]: I1201 08:30:52.075952 4822 scope.go:117] "RemoveContainer" containerID="7bfd6d1f7a9ab86bcf1e0a04da8dab4e66d74bb32f2b0918b0bfcb3abaf8e11e"
Dec 01 08:30:52 crc kubenswrapper[4822]: I1201 08:30:52.165308 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9rgc8" podUID="fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" containerName="registry-server" containerID="cri-o://f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb" gracePeriod=2
Dec 01 08:30:52 crc kubenswrapper[4822]: I1201 08:30:52.606156 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:52 crc kubenswrapper[4822]: I1201 08:30:52.760365 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-utilities\") pod \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\" (UID: \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\") "
Dec 01 08:30:52 crc kubenswrapper[4822]: I1201 08:30:52.760461 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-catalog-content\") pod \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\" (UID: \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\") "
Dec 01 08:30:52 crc kubenswrapper[4822]: I1201 08:30:52.760587 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcb5n\" (UniqueName: \"kubernetes.io/projected/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-kube-api-access-xcb5n\") pod \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\" (UID: \"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1\") "
Dec 01 08:30:52 crc kubenswrapper[4822]: I1201 08:30:52.761222 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-utilities" (OuterVolumeSpecName: "utilities") pod "fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" (UID: "fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 08:30:52 crc kubenswrapper[4822]: I1201 08:30:52.765393 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-kube-api-access-xcb5n" (OuterVolumeSpecName: "kube-api-access-xcb5n") pod "fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" (UID: "fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1"). InnerVolumeSpecName "kube-api-access-xcb5n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 08:30:52 crc kubenswrapper[4822]: I1201 08:30:52.862054 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 08:30:52 crc kubenswrapper[4822]: I1201 08:30:52.862110 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcb5n\" (UniqueName: \"kubernetes.io/projected/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-kube-api-access-xcb5n\") on node \"crc\" DevicePath \"\""
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.069621 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" (UID: "fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.165469 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.174530 4822 generic.go:334] "Generic (PLEG): container finished" podID="fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" containerID="f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb" exitCode=0
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.174592 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rgc8" event={"ID":"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1","Type":"ContainerDied","Data":"f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb"}
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.174621 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rgc8" event={"ID":"fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1","Type":"ContainerDied","Data":"9788d40b8206f24ea72c2a7d72188bc68389bf7b3787fcdbb46dfab6cd4f7649"}
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.174639 4822 scope.go:117] "RemoveContainer" containerID="f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb"
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.174791 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9rgc8"
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.207664 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9rgc8"]
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.211207 4822 scope.go:117] "RemoveContainer" containerID="4e839090275ce6de6074f045cedb80c910e5c27e67c2fe99656b4ef1995f823a"
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.218910 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9rgc8"]
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.234873 4822 scope.go:117] "RemoveContainer" containerID="4f59262214c8d07eb67a149952dfcb6650ba66471400bc37a1b443ecae56d7c0"
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.262163 4822 scope.go:117] "RemoveContainer" containerID="f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb"
Dec 01 08:30:53 crc kubenswrapper[4822]: E1201 08:30:53.264722 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb\": container with ID starting with f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb not found: ID does not exist" containerID="f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb"
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.264762 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb"} err="failed to get container status \"f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb\": rpc error: code = NotFound desc = could not find container \"f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb\": container with ID starting with f1fedb28e1bd263d2f8481f440903e36f0d8441fd72aa29e31d1338eee0b4eeb not found: ID does not exist"
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.264789 4822 scope.go:117] "RemoveContainer" containerID="4e839090275ce6de6074f045cedb80c910e5c27e67c2fe99656b4ef1995f823a"
Dec 01 08:30:53 crc kubenswrapper[4822]: E1201 08:30:53.265403 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e839090275ce6de6074f045cedb80c910e5c27e67c2fe99656b4ef1995f823a\": container with ID starting with 4e839090275ce6de6074f045cedb80c910e5c27e67c2fe99656b4ef1995f823a not found: ID does not exist" containerID="4e839090275ce6de6074f045cedb80c910e5c27e67c2fe99656b4ef1995f823a"
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.265480 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e839090275ce6de6074f045cedb80c910e5c27e67c2fe99656b4ef1995f823a"} err="failed to get container status \"4e839090275ce6de6074f045cedb80c910e5c27e67c2fe99656b4ef1995f823a\": rpc error: code = NotFound desc = could not find container \"4e839090275ce6de6074f045cedb80c910e5c27e67c2fe99656b4ef1995f823a\": container with ID starting with 4e839090275ce6de6074f045cedb80c910e5c27e67c2fe99656b4ef1995f823a not found: ID does not exist"
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.265532 4822 scope.go:117] "RemoveContainer" containerID="4f59262214c8d07eb67a149952dfcb6650ba66471400bc37a1b443ecae56d7c0"
Dec 01 08:30:53 crc kubenswrapper[4822]: E1201 08:30:53.265939 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f59262214c8d07eb67a149952dfcb6650ba66471400bc37a1b443ecae56d7c0\": container with ID starting with 4f59262214c8d07eb67a149952dfcb6650ba66471400bc37a1b443ecae56d7c0 not found: ID does not exist" containerID="4f59262214c8d07eb67a149952dfcb6650ba66471400bc37a1b443ecae56d7c0"
Dec 01 08:30:53 crc kubenswrapper[4822]: I1201 08:30:53.265963 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f59262214c8d07eb67a149952dfcb6650ba66471400bc37a1b443ecae56d7c0"} err="failed to get container status \"4f59262214c8d07eb67a149952dfcb6650ba66471400bc37a1b443ecae56d7c0\": rpc error: code = NotFound desc = could not find container \"4f59262214c8d07eb67a149952dfcb6650ba66471400bc37a1b443ecae56d7c0\": container with ID starting with 4f59262214c8d07eb67a149952dfcb6650ba66471400bc37a1b443ecae56d7c0 not found: ID does not exist"
Dec 01 08:30:54 crc kubenswrapper[4822]: I1201 08:30:54.956375 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302"
Dec 01 08:30:54 crc kubenswrapper[4822]: I1201 08:30:54.962544 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" path="/var/lib/kubelet/pods/fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1/volumes"
Dec 01 08:30:56 crc kubenswrapper[4822]: I1201 08:30:56.203593 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"a5c5e48601383b61230b820b55d9f5353aa9c6db3a296b16c279f307b7b3933e"}
Dec 01 08:31:37 crc kubenswrapper[4822]: I1201 08:31:37.253164 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8_d736d36e-1452-4556-ab66-9ef1a6c14828/util/0.log"
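[editor's note] The RemoveContainer / "DeleteContainer returned error" sequence above is the kubelet retrying deletion of containers CRI-O has already removed; the rpc NotFound is logged but harmless, because deletion is treated as idempotent. A sketch of that pattern with hypothetical helper names, using the same gRPC status codes CRI-O returns above:

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer stands in for the runtime's RemoveContainer call; here
    // it always fails with the same gRPC NotFound that CRI-O returns above
    // once the container is already gone.
    func removeContainer(id string) error {
        return status.Errorf(codes.NotFound, "could not find container %q", id)
    }

    // ensureRemoved treats NotFound as success, keeping deletion idempotent:
    // "already gone" satisfies the desired state.
    func ensureRemoved(id string) error {
        if err := removeContainer(id); status.Code(err) != codes.NotFound {
            return err
        }
        return nil
    }

    func main() {
        if err := ensureRemoved("f1fedb28e1bd"); err != nil {
            fmt.Println("remove failed:", err)
            return
        }
        fmt.Println("container treated as removed")
    }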
Dec 01 08:31:37 crc kubenswrapper[4822]: I1201 08:31:37.435814 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8_d736d36e-1452-4556-ab66-9ef1a6c14828/util/0.log"
Dec 01 08:31:37 crc kubenswrapper[4822]: I1201 08:31:37.448141 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8_d736d36e-1452-4556-ab66-9ef1a6c14828/pull/0.log"
Dec 01 08:31:37 crc kubenswrapper[4822]: I1201 08:31:37.449480 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8_d736d36e-1452-4556-ab66-9ef1a6c14828/pull/0.log"
Dec 01 08:31:37 crc kubenswrapper[4822]: I1201 08:31:37.607672 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8_d736d36e-1452-4556-ab66-9ef1a6c14828/util/0.log"
Dec 01 08:31:37 crc kubenswrapper[4822]: I1201 08:31:37.637775 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8_d736d36e-1452-4556-ab66-9ef1a6c14828/extract/0.log"
Dec 01 08:31:37 crc kubenswrapper[4822]: I1201 08:31:37.662219 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bw7dl8_d736d36e-1452-4556-ab66-9ef1a6c14828/pull/0.log"
Dec 01 08:31:37 crc kubenswrapper[4822]: I1201 08:31:37.812233 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-f4bff_fd0abaec-7f45-438f-843e-1a1dd2cbf841/kube-rbac-proxy/0.log"
Dec 01 08:31:37 crc kubenswrapper[4822]: I1201 08:31:37.858303 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-f4bff_fd0abaec-7f45-438f-843e-1a1dd2cbf841/manager/0.log"
Dec 01 08:31:37 crc kubenswrapper[4822]: I1201 08:31:37.921405 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-pbb9v_43d47ccc-afef-42e7-bc18-df3be5e2b4e0/kube-rbac-proxy/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.038930 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-pbb9v_43d47ccc-afef-42e7-bc18-df3be5e2b4e0/manager/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.091605 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-4xwss_adf58c74-4460-490a-97bb-a2d60a6efffa/kube-rbac-proxy/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.102541 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-4xwss_adf58c74-4460-490a-97bb-a2d60a6efffa/manager/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.258926 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-fkvck_031f17da-46a5-4904-b32c-968dbd5959c1/kube-rbac-proxy/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.367234 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-fkvck_031f17da-46a5-4904-b32c-968dbd5959c1/manager/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.424620 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-275w4_85f27b60-f694-4768-b55e-bb816ed4594b/manager/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.451991 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-275w4_85f27b60-f694-4768-b55e-bb816ed4594b/kube-rbac-proxy/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.556313 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-t2t9n_b6e7914c-28b1-4241-9db9-ebecda9ede7a/kube-rbac-proxy/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.624201 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-t2t9n_b6e7914c-28b1-4241-9db9-ebecda9ede7a/manager/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.700276 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-2s7q5_9f7f1540-19b0-48c0-adab-c10e8bdd0fd3/kube-rbac-proxy/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.852809 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-2s7q5_9f7f1540-19b0-48c0-adab-c10e8bdd0fd3/manager/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.891834 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-k72b6_ee5199ce-b0f1-4753-a317-8d4b95bca11b/manager/0.log"
Dec 01 08:31:38 crc kubenswrapper[4822]: I1201 08:31:38.924665 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-k72b6_ee5199ce-b0f1-4753-a317-8d4b95bca11b/kube-rbac-proxy/0.log"
Dec 01 08:31:39 crc kubenswrapper[4822]: I1201 08:31:39.055451 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-m8rnk_ea009d0a-d9c2-4265-96bf-6153ce222eef/kube-rbac-proxy/0.log"
Dec 01 08:31:39 crc kubenswrapper[4822]: I1201 08:31:39.126639 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-m8rnk_ea009d0a-d9c2-4265-96bf-6153ce222eef/manager/0.log"
Dec 01 08:31:39 crc kubenswrapper[4822]: I1201 08:31:39.189297 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-vlj27_af698804-e3b6-481d-bd2f-34350bae1b8f/kube-rbac-proxy/0.log"
Dec 01 08:31:39 crc kubenswrapper[4822]: I1201 08:31:39.221023 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-vlj27_af698804-e3b6-481d-bd2f-34350bae1b8f/manager/0.log"
Dec 01 08:31:39 crc kubenswrapper[4822]: I1201 08:31:39.323662 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-2vpl9_2ed3d718-e591-45b9-9ae7-f6ed765afa35/kube-rbac-proxy/0.log"
Dec 01 08:31:39 crc kubenswrapper[4822]: I1201 08:31:39.416769 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-2vpl9_2ed3d718-e591-45b9-9ae7-f6ed765afa35/manager/0.log"
Dec 01 08:31:39 crc kubenswrapper[4822]: I1201 08:31:39.569326 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-mbsh5_7fd09048-e506-48e1-9d30-01cbd6117fcc/kube-rbac-proxy/0.log"
Dec 01 08:31:39 crc kubenswrapper[4822]: I1201 08:31:39.577078 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-mbsh5_7fd09048-e506-48e1-9d30-01cbd6117fcc/manager/0.log"
Dec 01 08:31:39 crc kubenswrapper[4822]: I1201 08:31:39.658991 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-49rk7_4a0f6236-18f0-436a-9544-d76b8d1c3a09/kube-rbac-proxy/0.log"
Dec 01 08:31:39 crc kubenswrapper[4822]: I1201 08:31:39.843772 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-49rk7_4a0f6236-18f0-436a-9544-d76b8d1c3a09/manager/0.log"
Dec 01 08:31:39 crc kubenswrapper[4822]: I1201 08:31:39.864057 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-6shpm_825c452f-e271-42a4-ba90-d16f50140303/kube-rbac-proxy/0.log"
Dec 01 08:31:39 crc kubenswrapper[4822]: I1201 08:31:39.886347 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-6shpm_825c452f-e271-42a4-ba90-d16f50140303/manager/0.log"
Dec 01 08:31:40 crc kubenswrapper[4822]: I1201 08:31:40.225530 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6698bcb4462f7ll_f8f4cc9b-6768-4f0c-a82e-d4a831291fee/kube-rbac-proxy/0.log"
Dec 01 08:31:40 crc kubenswrapper[4822]: I1201 08:31:40.307613 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6698bcb4462f7ll_f8f4cc9b-6768-4f0c-a82e-d4a831291fee/manager/0.log"
Dec 01 08:31:40 crc kubenswrapper[4822]: I1201 08:31:40.690253 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6ddddd9d6f-5gtb2_d756e5cf-2fd3-4c62-8b35-a9e65a3b1073/operator/0.log"
Dec 01 08:31:40 crc kubenswrapper[4822]: I1201 08:31:40.807249 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-lrntz_50c132e0-fec5-40ca-9f09-8fbe39844bd6/registry-server/0.log"
Dec 01 08:31:40 crc kubenswrapper[4822]: I1201 08:31:40.919631 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-dmqx6_44bcc489-7661-42ff-b164-4bc2fea1a426/kube-rbac-proxy/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.044421 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-dmqx6_44bcc489-7661-42ff-b164-4bc2fea1a426/manager/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.185527 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-6wz2z_38214819-c905-4c30-8c6c-e8ea8978656f/kube-rbac-proxy/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.220285 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-656fd97d56-kmpbd_d169afb8-577e-430e-84b1-98f34bdbec2c/manager/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.262735 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-6wz2z_38214819-c905-4c30-8c6c-e8ea8978656f/manager/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.316409 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-wllwr_1e38b2a2-19d5-4375-8c74-c46ca488a520/operator/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.433478 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-wxp4b_3ebc517a-dfe9-4462-b92d-b381d254f028/kube-rbac-proxy/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.460285 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-wxp4b_3ebc517a-dfe9-4462-b92d-b381d254f028/manager/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.500933 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-zgl8s_157e4e2d-79c0-42c6-9dd5-a2669945d731/kube-rbac-proxy/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.617121 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-zgl8s_157e4e2d-79c0-42c6-9dd5-a2669945d731/manager/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.631460 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-mtshp_d94674cd-a8c3-4db7-acb1-2a9965fd85e0/kube-rbac-proxy/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.687008 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-mtshp_d94674cd-a8c3-4db7-acb1-2a9965fd85e0/manager/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.801559 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-8pmrk_0778e4fe-6293-4282-9978-a2838469affe/kube-rbac-proxy/0.log"
Dec 01 08:31:41 crc kubenswrapper[4822]: I1201 08:31:41.838918 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-8pmrk_0778e4fe-6293-4282-9978-a2838469affe/manager/0.log"
Dec 01 08:31:59 crc kubenswrapper[4822]: I1201 08:31:59.760008 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-qhchn_e411fb84-4e27-4611-80b5-9fd52e71441e/control-plane-machine-set-operator/0.log"
Dec 01 08:31:59 crc kubenswrapper[4822]: I1201 08:31:59.909091 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6nbbk_0126cc59-9259-43c1-91ea-cdc05047bbee/kube-rbac-proxy/0.log"
Dec 01 08:31:59 crc kubenswrapper[4822]: I1201 08:31:59.913848 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6nbbk_0126cc59-9259-43c1-91ea-cdc05047bbee/machine-api-operator/0.log"
Dec 01 08:32:11 crc kubenswrapper[4822]: I1201 08:32:11.919678 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-2zlrk_99f4689f-66e4-4b45-94e3-55791c3a186b/cert-manager-controller/0.log"
Dec 01 08:32:12 crc kubenswrapper[4822]: I1201 08:32:12.044679 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-j2jgj_0b41c100-758b-41ab-8a96-f46b3be4a01c/cert-manager-cainjector/0.log"
Dec 01 08:32:12 crc kubenswrapper[4822]: I1201 08:32:12.082292 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-ph822_7fb450da-1208-4fac-a91a-a83d60cbae08/cert-manager-webhook/0.log"
Dec 01 08:32:24 crc kubenswrapper[4822]: I1201 08:32:24.519792 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-v5nnm_3bf5665f-7be9-4d4e-a750-611d44124963/nmstate-console-plugin/0.log"
Dec 01 08:32:24 crc kubenswrapper[4822]: I1201 08:32:24.699358 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-cd8pn_fea47134-e547-45db-8461-8b4154f25225/nmstate-handler/0.log"
Dec 01 08:32:24 crc kubenswrapper[4822]: I1201 08:32:24.743171 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-kkt2f_005acf82-ce0c-4762-b26d-b16df2767b45/kube-rbac-proxy/0.log"
Dec 01 08:32:24 crc kubenswrapper[4822]: I1201 08:32:24.755193 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-kkt2f_005acf82-ce0c-4762-b26d-b16df2767b45/nmstate-metrics/0.log"
Dec 01 08:32:24 crc kubenswrapper[4822]: I1201 08:32:24.891050 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-dnlrq_5869a4b0-9484-44bc-b62e-6e1a48bf87f6/nmstate-operator/0.log"
Dec 01 08:32:24 crc kubenswrapper[4822]: I1201 08:32:24.957424 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-nwxmp_6cf74d74-242e-44f7-a881-01ae90b1c7be/nmstate-webhook/0.log"
Dec 01 08:32:39 crc kubenswrapper[4822]: I1201 08:32:39.117591 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-b25m8_0bfbda8a-cb53-46e0-8deb-180e26af5e36/kube-rbac-proxy/0.log"
Dec 01 08:32:39 crc kubenswrapper[4822]: I1201 08:32:39.396495 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/cp-frr-files/0.log"
Dec 01 08:32:39 crc kubenswrapper[4822]: I1201 08:32:39.431919 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-b25m8_0bfbda8a-cb53-46e0-8deb-180e26af5e36/controller/0.log"
Dec 01 08:32:39 crc kubenswrapper[4822]: I1201 08:32:39.518082 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/cp-reloader/0.log"
Dec 01 08:32:39 crc kubenswrapper[4822]: I1201 08:32:39.551124 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/cp-frr-files/0.log"
Dec 01 08:32:39 crc kubenswrapper[4822]: I1201 08:32:39.562045 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/cp-metrics/0.log"
Dec 01 08:32:39 crc kubenswrapper[4822]: I1201 08:32:39.645018 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/cp-reloader/0.log"
Dec 01 08:32:39 crc kubenswrapper[4822]: I1201 08:32:39.807119 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/cp-frr-files/0.log"
Dec 01 08:32:39 crc kubenswrapper[4822]: I1201 08:32:39.828707 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/cp-reloader/0.log"
Dec 01 08:32:39 crc kubenswrapper[4822]: I1201 08:32:39.838961 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/cp-metrics/0.log"
Dec 01 08:32:39 crc kubenswrapper[4822]: I1201 08:32:39.839109 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/cp-metrics/0.log"
Dec 01 08:32:40 crc kubenswrapper[4822]: I1201 08:32:40.000242 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/cp-metrics/0.log"
Dec 01 08:32:40 crc kubenswrapper[4822]: I1201 08:32:40.024016 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/controller/0.log"
Dec 01 08:32:40 crc kubenswrapper[4822]: I1201 08:32:40.030335 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/cp-reloader/0.log"
Dec 01 08:32:40 crc kubenswrapper[4822]: I1201 08:32:40.039934 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/cp-frr-files/0.log"
Dec 01 08:32:40 crc kubenswrapper[4822]: I1201 08:32:40.208935 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/frr-metrics/0.log"
Dec 01 08:32:40 crc kubenswrapper[4822]: I1201 08:32:40.213765 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/kube-rbac-proxy/0.log"
Dec 01 08:32:40 crc kubenswrapper[4822]: I1201 08:32:40.264146 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/kube-rbac-proxy-frr/0.log"
Dec 01 08:32:40 crc kubenswrapper[4822]: I1201 08:32:40.433531 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/reloader/0.log"
Dec 01 08:32:40 crc kubenswrapper[4822]: I1201 08:32:40.503437 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-k644t_5824cc48-45a9-4c0e-80f1-e8305911cccc/frr-k8s-webhook-server/0.log"
Dec 01 08:32:40 crc kubenswrapper[4822]: I1201 08:32:40.724942 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-f6cc7bfbc-4z284_c0272de7-59c2-4625-9cfb-6e6daaf65437/manager/0.log"
Dec 01 08:32:40 crc kubenswrapper[4822]: I1201 08:32:40.857524 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-f677cc54b-xd4c5_50359fe9-284a-489c-85ba-619ce67e1eb6/webhook-server/0.log"
Dec 01 08:32:41 crc kubenswrapper[4822]: I1201 08:32:41.007905 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-glkvk_4acda3d2-5fc5-4827-bdab-6369a308f6aa/kube-rbac-proxy/0.log"
Dec 01 08:32:41 crc kubenswrapper[4822]: I1201 08:32:41.467734 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-glkvk_4acda3d2-5fc5-4827-bdab-6369a308f6aa/speaker/0.log"
Dec 01 08:32:41 crc kubenswrapper[4822]: I1201 08:32:41.697365 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6lntj_59f52292-bc58-4afb-b290-6b1ab09a2187/frr/0.log"
Dec 01 08:32:44 crc kubenswrapper[4822]: E1201 08:32:44.102071 4822 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="1.152s"
Dec 01 08:32:54 crc kubenswrapper[4822]: I1201 08:32:54.676212 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq_238d02e4-3800-4fdd-8e17-2b4e0261eea8/util/0.log"
Dec 01 08:32:54 crc kubenswrapper[4822]: I1201 08:32:54.851060 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq_238d02e4-3800-4fdd-8e17-2b4e0261eea8/util/0.log"
Dec 01 08:32:54 crc kubenswrapper[4822]: I1201 08:32:54.870851 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq_238d02e4-3800-4fdd-8e17-2b4e0261eea8/pull/0.log"
Dec 01 08:32:54 crc kubenswrapper[4822]: I1201 08:32:54.914629 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq_238d02e4-3800-4fdd-8e17-2b4e0261eea8/pull/0.log"
Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.067091 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq_238d02e4-3800-4fdd-8e17-2b4e0261eea8/pull/0.log"
Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.072038 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq_238d02e4-3800-4fdd-8e17-2b4e0261eea8/util/0.log"
Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.074503 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9ptlq_238d02e4-3800-4fdd-8e17-2b4e0261eea8/extract/0.log"
Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.208228 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf_54b66714-bfae-4121-932c-e03181665394/util/0.log"
Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.365663 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf_54b66714-bfae-4121-932c-e03181665394/util/0.log"
Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.370338 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf_54b66714-bfae-4121-932c-e03181665394/pull/0.log"
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf_54b66714-bfae-4121-932c-e03181665394/pull/0.log" Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.531616 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf_54b66714-bfae-4121-932c-e03181665394/pull/0.log" Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.542245 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf_54b66714-bfae-4121-932c-e03181665394/util/0.log" Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.601875 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fpngvf_54b66714-bfae-4121-932c-e03181665394/extract/0.log" Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.719153 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls_04f4b9dc-ac07-4730-9fed-3d02c5144397/util/0.log" Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.845291 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls_04f4b9dc-ac07-4730-9fed-3d02c5144397/util/0.log" Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.881821 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls_04f4b9dc-ac07-4730-9fed-3d02c5144397/pull/0.log" Dec 01 08:32:55 crc kubenswrapper[4822]: I1201 08:32:55.881942 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls_04f4b9dc-ac07-4730-9fed-3d02c5144397/pull/0.log" Dec 01 08:32:56 crc kubenswrapper[4822]: I1201 08:32:56.030036 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls_04f4b9dc-ac07-4730-9fed-3d02c5144397/pull/0.log" Dec 01 08:32:56 crc kubenswrapper[4822]: I1201 08:32:56.053523 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls_04f4b9dc-ac07-4730-9fed-3d02c5144397/extract/0.log" Dec 01 08:32:56 crc kubenswrapper[4822]: I1201 08:32:56.057852 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832hmls_04f4b9dc-ac07-4730-9fed-3d02c5144397/util/0.log" Dec 01 08:32:56 crc kubenswrapper[4822]: I1201 08:32:56.190347 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nwqwt_0b600d64-aa25-4b7a-bce7-482503ba8f7d/extract-utilities/0.log" Dec 01 08:32:56 crc kubenswrapper[4822]: I1201 08:32:56.369680 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nwqwt_0b600d64-aa25-4b7a-bce7-482503ba8f7d/extract-content/0.log" Dec 01 08:32:56 crc kubenswrapper[4822]: I1201 08:32:56.374983 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nwqwt_0b600d64-aa25-4b7a-bce7-482503ba8f7d/extract-content/0.log" Dec 01 08:32:56 crc kubenswrapper[4822]: I1201 08:32:56.404343 
4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nwqwt_0b600d64-aa25-4b7a-bce7-482503ba8f7d/extract-utilities/0.log" Dec 01 08:32:56 crc kubenswrapper[4822]: I1201 08:32:56.582693 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nwqwt_0b600d64-aa25-4b7a-bce7-482503ba8f7d/extract-utilities/0.log" Dec 01 08:32:56 crc kubenswrapper[4822]: I1201 08:32:56.614354 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nwqwt_0b600d64-aa25-4b7a-bce7-482503ba8f7d/extract-content/0.log" Dec 01 08:32:56 crc kubenswrapper[4822]: I1201 08:32:56.767711 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vgdjq_eba72c1f-8fb5-45e5-9363-cf22c664614d/extract-utilities/0.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 08:32:57.002957 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vgdjq_eba72c1f-8fb5-45e5-9363-cf22c664614d/extract-content/0.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 08:32:57.019821 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vgdjq_eba72c1f-8fb5-45e5-9363-cf22c664614d/extract-utilities/0.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 08:32:57.046818 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vgdjq_eba72c1f-8fb5-45e5-9363-cf22c664614d/extract-content/0.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 08:32:57.235818 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vgdjq_eba72c1f-8fb5-45e5-9363-cf22c664614d/extract-utilities/0.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 08:32:57.236773 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vgdjq_eba72c1f-8fb5-45e5-9363-cf22c664614d/extract-content/0.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 08:32:57.378928 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-nwqwt_0b600d64-aa25-4b7a-bce7-482503ba8f7d/registry-server/0.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 08:32:57.523145 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fltjl_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74/marketplace-operator/2.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 08:32:57.655883 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fltjl_fabd8807-fa3c-4319-aeb8-f5f8ee2a2a74/marketplace-operator/1.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 08:32:57.733671 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8fscd_c77349ec-bedd-4aba-a58f-b328a5f4b877/extract-utilities/0.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 08:32:57.832653 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vgdjq_eba72c1f-8fb5-45e5-9363-cf22c664614d/registry-server/0.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 08:32:57.916146 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8fscd_c77349ec-bedd-4aba-a58f-b328a5f4b877/extract-utilities/0.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 
08:32:57.936959 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8fscd_c77349ec-bedd-4aba-a58f-b328a5f4b877/extract-content/0.log" Dec 01 08:32:57 crc kubenswrapper[4822]: I1201 08:32:57.969855 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8fscd_c77349ec-bedd-4aba-a58f-b328a5f4b877/extract-content/0.log" Dec 01 08:32:58 crc kubenswrapper[4822]: I1201 08:32:58.154936 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8fscd_c77349ec-bedd-4aba-a58f-b328a5f4b877/extract-content/0.log" Dec 01 08:32:58 crc kubenswrapper[4822]: I1201 08:32:58.164412 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8fscd_c77349ec-bedd-4aba-a58f-b328a5f4b877/extract-utilities/0.log" Dec 01 08:32:58 crc kubenswrapper[4822]: I1201 08:32:58.213469 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5v6h4_2825e8f7-d99d-4f07-a6db-b1c976946b30/extract-utilities/0.log" Dec 01 08:32:58 crc kubenswrapper[4822]: I1201 08:32:58.422455 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5v6h4_2825e8f7-d99d-4f07-a6db-b1c976946b30/extract-content/0.log" Dec 01 08:32:58 crc kubenswrapper[4822]: I1201 08:32:58.423205 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5v6h4_2825e8f7-d99d-4f07-a6db-b1c976946b30/extract-utilities/0.log" Dec 01 08:32:58 crc kubenswrapper[4822]: I1201 08:32:58.425490 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8fscd_c77349ec-bedd-4aba-a58f-b328a5f4b877/registry-server/0.log" Dec 01 08:32:58 crc kubenswrapper[4822]: I1201 08:32:58.517368 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5v6h4_2825e8f7-d99d-4f07-a6db-b1c976946b30/extract-content/0.log" Dec 01 08:32:58 crc kubenswrapper[4822]: I1201 08:32:58.609665 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5v6h4_2825e8f7-d99d-4f07-a6db-b1c976946b30/extract-utilities/0.log" Dec 01 08:32:58 crc kubenswrapper[4822]: I1201 08:32:58.648330 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5v6h4_2825e8f7-d99d-4f07-a6db-b1c976946b30/extract-content/0.log" Dec 01 08:32:59 crc kubenswrapper[4822]: I1201 08:32:59.294336 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-5v6h4_2825e8f7-d99d-4f07-a6db-b1c976946b30/registry-server/0.log" Dec 01 08:33:12 crc kubenswrapper[4822]: I1201 08:33:12.542624 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:33:12 crc kubenswrapper[4822]: I1201 08:33:12.543292 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:33:42 crc kubenswrapper[4822]: I1201 
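[editor's note] The Liveness probe failures above are plain HTTP GETs against the container's health endpoint; "connection refused" counts as a failure, and once enough consecutive probes fail the kubelet kills and restarts the container (visible at 08:34:12 below, with gracePeriod=600). A sketch of such a probe loop, assuming Kubernetes' default failureThreshold of 3 and the success range the kubelet uses for HTTP probes (status 200-399):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probe performs one HTTP liveness check, the same kind of request the
    // kubelet sends to http://127.0.0.1:8798/health above.
    func probe(url string) error {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return err // e.g. "connect: connection refused"
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("unhealthy status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        const failureThreshold = 3 // Kubernetes' default for probes
        failures := 0
        for i := 0; i < 5; i++ {
            if err := probe("http://127.0.0.1:8798/health"); err != nil {
                failures++
                fmt.Printf("probe failed (%d/%d): %v\n", failures, failureThreshold, err)
                if failures >= failureThreshold {
                    fmt.Println("would restart container (kill with grace period)")
                    return
                }
            } else {
                failures = 0 // the consecutive-failure counter resets on success
            }
            time.Sleep(time.Second)
        }
    }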
08:33:42.542830 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:33:42 crc kubenswrapper[4822]: I1201 08:33:42.543357 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:34:04 crc kubenswrapper[4822]: I1201 08:34:04.696009 4822 generic.go:334] "Generic (PLEG): container finished" podID="2d03fdc4-435a-4d0f-99e6-92cef7c85ae0" containerID="997b1611ad65124e039b824655242f4c2170ad2335a3fee2713edd51b9ec6982" exitCode=0 Dec 01 08:34:04 crc kubenswrapper[4822]: I1201 08:34:04.696605 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wt5c2/must-gather-ksz2h" event={"ID":"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0","Type":"ContainerDied","Data":"997b1611ad65124e039b824655242f4c2170ad2335a3fee2713edd51b9ec6982"} Dec 01 08:34:04 crc kubenswrapper[4822]: I1201 08:34:04.697351 4822 scope.go:117] "RemoveContainer" containerID="997b1611ad65124e039b824655242f4c2170ad2335a3fee2713edd51b9ec6982" Dec 01 08:34:05 crc kubenswrapper[4822]: I1201 08:34:05.401723 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wt5c2_must-gather-ksz2h_2d03fdc4-435a-4d0f-99e6-92cef7c85ae0/gather/0.log" Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.514363 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wt5c2/must-gather-ksz2h"] Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.515301 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-wt5c2/must-gather-ksz2h" podUID="2d03fdc4-435a-4d0f-99e6-92cef7c85ae0" containerName="copy" containerID="cri-o://0f8229835e2cb825778e8df659aeeaeb67980401fcc870e11e7c2f4ccdc4af8d" gracePeriod=2 Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.522202 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wt5c2/must-gather-ksz2h"] Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.543273 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.543342 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.543395 4822 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.544417 4822 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"a5c5e48601383b61230b820b55d9f5353aa9c6db3a296b16c279f307b7b3933e"} pod="openshift-machine-config-operator/machine-config-daemon-2cz64" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.544488 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" containerID="cri-o://a5c5e48601383b61230b820b55d9f5353aa9c6db3a296b16c279f307b7b3933e" gracePeriod=600 Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.759181 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wt5c2_must-gather-ksz2h_2d03fdc4-435a-4d0f-99e6-92cef7c85ae0/copy/0.log" Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.759735 4822 generic.go:334] "Generic (PLEG): container finished" podID="2d03fdc4-435a-4d0f-99e6-92cef7c85ae0" containerID="0f8229835e2cb825778e8df659aeeaeb67980401fcc870e11e7c2f4ccdc4af8d" exitCode=143 Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.766679 4822 generic.go:334] "Generic (PLEG): container finished" podID="a6c6a838-3829-4058-aa59-1302d07e4507" containerID="a5c5e48601383b61230b820b55d9f5353aa9c6db3a296b16c279f307b7b3933e" exitCode=0 Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.766732 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerDied","Data":"a5c5e48601383b61230b820b55d9f5353aa9c6db3a296b16c279f307b7b3933e"} Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.766771 4822 scope.go:117] "RemoveContainer" containerID="6142213bf18bfca4b0d2ed1ff13850a9886f6d76757a71ed82e533bdc97cd302" Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.921516 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wt5c2_must-gather-ksz2h_2d03fdc4-435a-4d0f-99e6-92cef7c85ae0/copy/0.log" Dec 01 08:34:12 crc kubenswrapper[4822]: I1201 08:34:12.921967 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wt5c2/must-gather-ksz2h" Dec 01 08:34:13 crc kubenswrapper[4822]: I1201 08:34:13.141707 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0-must-gather-output\") pod \"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0\" (UID: \"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0\") " Dec 01 08:34:13 crc kubenswrapper[4822]: I1201 08:34:13.141902 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-927cb\" (UniqueName: \"kubernetes.io/projected/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0-kube-api-access-927cb\") pod \"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0\" (UID: \"2d03fdc4-435a-4d0f-99e6-92cef7c85ae0\") " Dec 01 08:34:13 crc kubenswrapper[4822]: I1201 08:34:13.149082 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0-kube-api-access-927cb" (OuterVolumeSpecName: "kube-api-access-927cb") pod "2d03fdc4-435a-4d0f-99e6-92cef7c85ae0" (UID: "2d03fdc4-435a-4d0f-99e6-92cef7c85ae0"). InnerVolumeSpecName "kube-api-access-927cb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:34:13 crc kubenswrapper[4822]: I1201 08:34:13.234994 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "2d03fdc4-435a-4d0f-99e6-92cef7c85ae0" (UID: "2d03fdc4-435a-4d0f-99e6-92cef7c85ae0"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:34:13 crc kubenswrapper[4822]: I1201 08:34:13.244265 4822 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 01 08:34:13 crc kubenswrapper[4822]: I1201 08:34:13.244304 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-927cb\" (UniqueName: \"kubernetes.io/projected/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0-kube-api-access-927cb\") on node \"crc\" DevicePath \"\"" Dec 01 08:34:13 crc kubenswrapper[4822]: I1201 08:34:13.774939 4822 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wt5c2_must-gather-ksz2h_2d03fdc4-435a-4d0f-99e6-92cef7c85ae0/copy/0.log" Dec 01 08:34:13 crc kubenswrapper[4822]: I1201 08:34:13.775351 4822 scope.go:117] "RemoveContainer" containerID="0f8229835e2cb825778e8df659aeeaeb67980401fcc870e11e7c2f4ccdc4af8d" Dec 01 08:34:13 crc kubenswrapper[4822]: I1201 08:34:13.775452 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wt5c2/must-gather-ksz2h" Dec 01 08:34:13 crc kubenswrapper[4822]: I1201 08:34:13.777741 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" event={"ID":"a6c6a838-3829-4058-aa59-1302d07e4507","Type":"ContainerStarted","Data":"08dc99983f21dbd147d80b909620cb332a7902ae3035bb47a2f7da83751dd4c2"} Dec 01 08:34:13 crc kubenswrapper[4822]: I1201 08:34:13.794674 4822 scope.go:117] "RemoveContainer" containerID="997b1611ad65124e039b824655242f4c2170ad2335a3fee2713edd51b9ec6982" Dec 01 08:34:14 crc kubenswrapper[4822]: I1201 08:34:14.960119 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d03fdc4-435a-4d0f-99e6-92cef7c85ae0" path="/var/lib/kubelet/pods/2d03fdc4-435a-4d0f-99e6-92cef7c85ae0/volumes" Dec 01 08:35:52 crc kubenswrapper[4822]: I1201 08:35:52.227864 4822 scope.go:117] "RemoveContainer" containerID="9294cf514719f7169e03adea9c33d534acdd9214a5697d2d66e97dff5d033631" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.577136 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gvtvg"] Dec 01 08:36:03 crc kubenswrapper[4822]: E1201 08:36:03.582250 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" containerName="extract-utilities" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.582301 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" containerName="extract-utilities" Dec 01 08:36:03 crc kubenswrapper[4822]: E1201 08:36:03.582327 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" containerName="registry-server" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.582341 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" 
containerName="registry-server" Dec 01 08:36:03 crc kubenswrapper[4822]: E1201 08:36:03.582367 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d03fdc4-435a-4d0f-99e6-92cef7c85ae0" containerName="copy" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.582381 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d03fdc4-435a-4d0f-99e6-92cef7c85ae0" containerName="copy" Dec 01 08:36:03 crc kubenswrapper[4822]: E1201 08:36:03.582417 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" containerName="extract-content" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.582430 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" containerName="extract-content" Dec 01 08:36:03 crc kubenswrapper[4822]: E1201 08:36:03.582458 4822 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d03fdc4-435a-4d0f-99e6-92cef7c85ae0" containerName="gather" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.582471 4822 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d03fdc4-435a-4d0f-99e6-92cef7c85ae0" containerName="gather" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.582950 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d03fdc4-435a-4d0f-99e6-92cef7c85ae0" containerName="copy" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.582995 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcf960a5-7cc4-4ed2-8bcb-2c64cd5238c1" containerName="registry-server" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.583071 4822 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d03fdc4-435a-4d0f-99e6-92cef7c85ae0" containerName="gather" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.585312 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.593482 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gvtvg"] Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.766032 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qr9fp\" (UniqueName: \"kubernetes.io/projected/b9851b67-4e73-4382-abf2-df7b04e28404-kube-api-access-qr9fp\") pod \"community-operators-gvtvg\" (UID: \"b9851b67-4e73-4382-abf2-df7b04e28404\") " pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.766127 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9851b67-4e73-4382-abf2-df7b04e28404-utilities\") pod \"community-operators-gvtvg\" (UID: \"b9851b67-4e73-4382-abf2-df7b04e28404\") " pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.766212 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9851b67-4e73-4382-abf2-df7b04e28404-catalog-content\") pod \"community-operators-gvtvg\" (UID: \"b9851b67-4e73-4382-abf2-df7b04e28404\") " pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.867832 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9851b67-4e73-4382-abf2-df7b04e28404-utilities\") pod \"community-operators-gvtvg\" (UID: \"b9851b67-4e73-4382-abf2-df7b04e28404\") " pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.867937 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9851b67-4e73-4382-abf2-df7b04e28404-catalog-content\") pod \"community-operators-gvtvg\" (UID: \"b9851b67-4e73-4382-abf2-df7b04e28404\") " pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.867961 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qr9fp\" (UniqueName: \"kubernetes.io/projected/b9851b67-4e73-4382-abf2-df7b04e28404-kube-api-access-qr9fp\") pod \"community-operators-gvtvg\" (UID: \"b9851b67-4e73-4382-abf2-df7b04e28404\") " pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.868485 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9851b67-4e73-4382-abf2-df7b04e28404-utilities\") pod \"community-operators-gvtvg\" (UID: \"b9851b67-4e73-4382-abf2-df7b04e28404\") " pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.868506 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9851b67-4e73-4382-abf2-df7b04e28404-catalog-content\") pod \"community-operators-gvtvg\" (UID: \"b9851b67-4e73-4382-abf2-df7b04e28404\") " pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.888345 4822 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qr9fp\" (UniqueName: \"kubernetes.io/projected/b9851b67-4e73-4382-abf2-df7b04e28404-kube-api-access-qr9fp\") pod \"community-operators-gvtvg\" (UID: \"b9851b67-4e73-4382-abf2-df7b04e28404\") " pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:03 crc kubenswrapper[4822]: I1201 08:36:03.916880 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.074267 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gvtvg"] Dec 01 08:36:05 crc kubenswrapper[4822]: W1201 08:36:05.082317 4822 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9851b67_4e73_4382_abf2_df7b04e28404.slice/crio-b06de465aac17fce4e65a2de9ec164d7aaf1bb8f58c9c49f0e2748fce5a41a23 WatchSource:0}: Error finding container b06de465aac17fce4e65a2de9ec164d7aaf1bb8f58c9c49f0e2748fce5a41a23: Status 404 returned error can't find the container with id b06de465aac17fce4e65a2de9ec164d7aaf1bb8f58c9c49f0e2748fce5a41a23 Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.771089 4822 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-j4x4g"] Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.774299 4822 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.789641 4822 generic.go:334] "Generic (PLEG): container finished" podID="b9851b67-4e73-4382-abf2-df7b04e28404" containerID="9f9923d3de2f04b3649a09c09a05bcc551a317ee076c020b273ab71ec9764f78" exitCode=0 Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.789753 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvtvg" event={"ID":"b9851b67-4e73-4382-abf2-df7b04e28404","Type":"ContainerDied","Data":"9f9923d3de2f04b3649a09c09a05bcc551a317ee076c020b273ab71ec9764f78"} Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.789816 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvtvg" event={"ID":"b9851b67-4e73-4382-abf2-df7b04e28404","Type":"ContainerStarted","Data":"b06de465aac17fce4e65a2de9ec164d7aaf1bb8f58c9c49f0e2748fce5a41a23"} Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.791697 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j4x4g"] Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.792191 4822 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.799506 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/520037e0-140b-4c8e-907d-437b956c713e-utilities\") pod \"redhat-operators-j4x4g\" (UID: \"520037e0-140b-4c8e-907d-437b956c713e\") " pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.799670 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/520037e0-140b-4c8e-907d-437b956c713e-catalog-content\") pod \"redhat-operators-j4x4g\" (UID: 
\"520037e0-140b-4c8e-907d-437b956c713e\") " pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.799757 4822 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghzzg\" (UniqueName: \"kubernetes.io/projected/520037e0-140b-4c8e-907d-437b956c713e-kube-api-access-ghzzg\") pod \"redhat-operators-j4x4g\" (UID: \"520037e0-140b-4c8e-907d-437b956c713e\") " pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.901533 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghzzg\" (UniqueName: \"kubernetes.io/projected/520037e0-140b-4c8e-907d-437b956c713e-kube-api-access-ghzzg\") pod \"redhat-operators-j4x4g\" (UID: \"520037e0-140b-4c8e-907d-437b956c713e\") " pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.901682 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/520037e0-140b-4c8e-907d-437b956c713e-utilities\") pod \"redhat-operators-j4x4g\" (UID: \"520037e0-140b-4c8e-907d-437b956c713e\") " pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.901754 4822 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/520037e0-140b-4c8e-907d-437b956c713e-catalog-content\") pod \"redhat-operators-j4x4g\" (UID: \"520037e0-140b-4c8e-907d-437b956c713e\") " pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.902324 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/520037e0-140b-4c8e-907d-437b956c713e-utilities\") pod \"redhat-operators-j4x4g\" (UID: \"520037e0-140b-4c8e-907d-437b956c713e\") " pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.905000 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/520037e0-140b-4c8e-907d-437b956c713e-catalog-content\") pod \"redhat-operators-j4x4g\" (UID: \"520037e0-140b-4c8e-907d-437b956c713e\") " pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:05 crc kubenswrapper[4822]: I1201 08:36:05.940593 4822 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghzzg\" (UniqueName: \"kubernetes.io/projected/520037e0-140b-4c8e-907d-437b956c713e-kube-api-access-ghzzg\") pod \"redhat-operators-j4x4g\" (UID: \"520037e0-140b-4c8e-907d-437b956c713e\") " pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:06 crc kubenswrapper[4822]: I1201 08:36:06.123166 4822 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:06 crc kubenswrapper[4822]: I1201 08:36:06.574029 4822 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j4x4g"] Dec 01 08:36:06 crc kubenswrapper[4822]: I1201 08:36:06.798838 4822 generic.go:334] "Generic (PLEG): container finished" podID="520037e0-140b-4c8e-907d-437b956c713e" containerID="6db96932e376df47c364779ab25b062dd6b66f9092a47f1d92fdd1aa9845f56d" exitCode=0 Dec 01 08:36:06 crc kubenswrapper[4822]: I1201 08:36:06.798927 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j4x4g" event={"ID":"520037e0-140b-4c8e-907d-437b956c713e","Type":"ContainerDied","Data":"6db96932e376df47c364779ab25b062dd6b66f9092a47f1d92fdd1aa9845f56d"} Dec 01 08:36:06 crc kubenswrapper[4822]: I1201 08:36:06.799213 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j4x4g" event={"ID":"520037e0-140b-4c8e-907d-437b956c713e","Type":"ContainerStarted","Data":"3eca5bbea2075e03c296a2628149320b02cab9deb1620925f76d52c00a9033b6"} Dec 01 08:36:07 crc kubenswrapper[4822]: I1201 08:36:07.814109 4822 generic.go:334] "Generic (PLEG): container finished" podID="b9851b67-4e73-4382-abf2-df7b04e28404" containerID="f8c0a1599b869f70c7861bab0dc3013da10d61eb88840ca91bfc811b52d29d5c" exitCode=0 Dec 01 08:36:07 crc kubenswrapper[4822]: I1201 08:36:07.814181 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvtvg" event={"ID":"b9851b67-4e73-4382-abf2-df7b04e28404","Type":"ContainerDied","Data":"f8c0a1599b869f70c7861bab0dc3013da10d61eb88840ca91bfc811b52d29d5c"} Dec 01 08:36:08 crc kubenswrapper[4822]: I1201 08:36:08.826710 4822 generic.go:334] "Generic (PLEG): container finished" podID="520037e0-140b-4c8e-907d-437b956c713e" containerID="b123071923b5583ddd6d5b46fb58eb9a32ffa0069042f4218676925bdfac2c5c" exitCode=0 Dec 01 08:36:08 crc kubenswrapper[4822]: I1201 08:36:08.827011 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j4x4g" event={"ID":"520037e0-140b-4c8e-907d-437b956c713e","Type":"ContainerDied","Data":"b123071923b5583ddd6d5b46fb58eb9a32ffa0069042f4218676925bdfac2c5c"} Dec 01 08:36:08 crc kubenswrapper[4822]: I1201 08:36:08.831390 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvtvg" event={"ID":"b9851b67-4e73-4382-abf2-df7b04e28404","Type":"ContainerStarted","Data":"bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0"} Dec 01 08:36:08 crc kubenswrapper[4822]: I1201 08:36:08.878677 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gvtvg" podStartSLOduration=3.158687527 podStartE2EDuration="5.878660418s" podCreationTimestamp="2025-12-01 08:36:03 +0000 UTC" firstStartedPulling="2025-12-01 08:36:05.791518831 +0000 UTC m=+6321.112326567" lastFinishedPulling="2025-12-01 08:36:08.511491772 +0000 UTC m=+6323.832299458" observedRunningTime="2025-12-01 08:36:08.872541655 +0000 UTC m=+6324.193349341" watchObservedRunningTime="2025-12-01 08:36:08.878660418 +0000 UTC m=+6324.199468104" Dec 01 08:36:09 crc kubenswrapper[4822]: I1201 08:36:09.841755 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j4x4g" 
event={"ID":"520037e0-140b-4c8e-907d-437b956c713e","Type":"ContainerStarted","Data":"1d42072940025d3f36392bd069509361a36851eaf4623e8792f7a1855f6e0199"} Dec 01 08:36:09 crc kubenswrapper[4822]: I1201 08:36:09.864312 4822 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-j4x4g" podStartSLOduration=2.3923740540000002 podStartE2EDuration="4.864298534s" podCreationTimestamp="2025-12-01 08:36:05 +0000 UTC" firstStartedPulling="2025-12-01 08:36:06.800729641 +0000 UTC m=+6322.121537327" lastFinishedPulling="2025-12-01 08:36:09.272654101 +0000 UTC m=+6324.593461807" observedRunningTime="2025-12-01 08:36:09.862542404 +0000 UTC m=+6325.183350110" watchObservedRunningTime="2025-12-01 08:36:09.864298534 +0000 UTC m=+6325.185106220" Dec 01 08:36:12 crc kubenswrapper[4822]: I1201 08:36:12.543607 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:36:12 crc kubenswrapper[4822]: I1201 08:36:12.544085 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 08:36:13 crc kubenswrapper[4822]: I1201 08:36:13.917798 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:13 crc kubenswrapper[4822]: I1201 08:36:13.918163 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:13 crc kubenswrapper[4822]: I1201 08:36:13.983481 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:14 crc kubenswrapper[4822]: I1201 08:36:14.934228 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:15 crc kubenswrapper[4822]: I1201 08:36:15.013423 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gvtvg"] Dec 01 08:36:16 crc kubenswrapper[4822]: I1201 08:36:16.123663 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:16 crc kubenswrapper[4822]: I1201 08:36:16.124018 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:16 crc kubenswrapper[4822]: I1201 08:36:16.205449 4822 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:16 crc kubenswrapper[4822]: I1201 08:36:16.903104 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gvtvg" podUID="b9851b67-4e73-4382-abf2-df7b04e28404" containerName="registry-server" containerID="cri-o://bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0" gracePeriod=2 Dec 01 08:36:16 crc kubenswrapper[4822]: I1201 08:36:16.984576 4822 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.361831 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j4x4g"] Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.821761 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.914066 4822 generic.go:334] "Generic (PLEG): container finished" podID="b9851b67-4e73-4382-abf2-df7b04e28404" containerID="bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0" exitCode=0 Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.914161 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvtvg" event={"ID":"b9851b67-4e73-4382-abf2-df7b04e28404","Type":"ContainerDied","Data":"bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0"} Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.914254 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvtvg" event={"ID":"b9851b67-4e73-4382-abf2-df7b04e28404","Type":"ContainerDied","Data":"b06de465aac17fce4e65a2de9ec164d7aaf1bb8f58c9c49f0e2748fce5a41a23"} Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.914304 4822 scope.go:117] "RemoveContainer" containerID="bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.914336 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gvtvg" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.933932 4822 scope.go:117] "RemoveContainer" containerID="f8c0a1599b869f70c7861bab0dc3013da10d61eb88840ca91bfc811b52d29d5c" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.952011 4822 scope.go:117] "RemoveContainer" containerID="9f9923d3de2f04b3649a09c09a05bcc551a317ee076c020b273ab71ec9764f78" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.974957 4822 scope.go:117] "RemoveContainer" containerID="bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0" Dec 01 08:36:17 crc kubenswrapper[4822]: E1201 08:36:17.975429 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0\": container with ID starting with bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0 not found: ID does not exist" containerID="bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.975904 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0"} err="failed to get container status \"bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0\": rpc error: code = NotFound desc = could not find container \"bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0\": container with ID starting with bd49ee0c8f2cbfad1059af649b7f9981c65b898085e8ef9fc8cc715a094634c0 not found: ID does not exist" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.975945 4822 scope.go:117] "RemoveContainer" containerID="f8c0a1599b869f70c7861bab0dc3013da10d61eb88840ca91bfc811b52d29d5c" Dec 01 08:36:17 crc kubenswrapper[4822]: E1201 08:36:17.976320 4822 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8c0a1599b869f70c7861bab0dc3013da10d61eb88840ca91bfc811b52d29d5c\": container with ID starting with f8c0a1599b869f70c7861bab0dc3013da10d61eb88840ca91bfc811b52d29d5c not found: ID does not exist" containerID="f8c0a1599b869f70c7861bab0dc3013da10d61eb88840ca91bfc811b52d29d5c" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.976372 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8c0a1599b869f70c7861bab0dc3013da10d61eb88840ca91bfc811b52d29d5c"} err="failed to get container status \"f8c0a1599b869f70c7861bab0dc3013da10d61eb88840ca91bfc811b52d29d5c\": rpc error: code = NotFound desc = could not find container \"f8c0a1599b869f70c7861bab0dc3013da10d61eb88840ca91bfc811b52d29d5c\": container with ID starting with f8c0a1599b869f70c7861bab0dc3013da10d61eb88840ca91bfc811b52d29d5c not found: ID does not exist" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.976391 4822 scope.go:117] "RemoveContainer" containerID="9f9923d3de2f04b3649a09c09a05bcc551a317ee076c020b273ab71ec9764f78" Dec 01 08:36:17 crc kubenswrapper[4822]: E1201 08:36:17.976714 4822 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f9923d3de2f04b3649a09c09a05bcc551a317ee076c020b273ab71ec9764f78\": container with ID starting with 9f9923d3de2f04b3649a09c09a05bcc551a317ee076c020b273ab71ec9764f78 not found: ID does not exist" containerID="9f9923d3de2f04b3649a09c09a05bcc551a317ee076c020b273ab71ec9764f78" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.976749 4822 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f9923d3de2f04b3649a09c09a05bcc551a317ee076c020b273ab71ec9764f78"} err="failed to get container status \"9f9923d3de2f04b3649a09c09a05bcc551a317ee076c020b273ab71ec9764f78\": rpc error: code = NotFound desc = could not find container \"9f9923d3de2f04b3649a09c09a05bcc551a317ee076c020b273ab71ec9764f78\": container with ID starting with 9f9923d3de2f04b3649a09c09a05bcc551a317ee076c020b273ab71ec9764f78 not found: ID does not exist" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.985519 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9851b67-4e73-4382-abf2-df7b04e28404-utilities\") pod \"b9851b67-4e73-4382-abf2-df7b04e28404\" (UID: \"b9851b67-4e73-4382-abf2-df7b04e28404\") " Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.985691 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9851b67-4e73-4382-abf2-df7b04e28404-catalog-content\") pod \"b9851b67-4e73-4382-abf2-df7b04e28404\" (UID: \"b9851b67-4e73-4382-abf2-df7b04e28404\") " Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.985738 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qr9fp\" (UniqueName: \"kubernetes.io/projected/b9851b67-4e73-4382-abf2-df7b04e28404-kube-api-access-qr9fp\") pod \"b9851b67-4e73-4382-abf2-df7b04e28404\" (UID: \"b9851b67-4e73-4382-abf2-df7b04e28404\") " Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.986512 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9851b67-4e73-4382-abf2-df7b04e28404-utilities" (OuterVolumeSpecName: "utilities") pod 
"b9851b67-4e73-4382-abf2-df7b04e28404" (UID: "b9851b67-4e73-4382-abf2-df7b04e28404"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:36:17 crc kubenswrapper[4822]: I1201 08:36:17.992258 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9851b67-4e73-4382-abf2-df7b04e28404-kube-api-access-qr9fp" (OuterVolumeSpecName: "kube-api-access-qr9fp") pod "b9851b67-4e73-4382-abf2-df7b04e28404" (UID: "b9851b67-4e73-4382-abf2-df7b04e28404"). InnerVolumeSpecName "kube-api-access-qr9fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:36:18 crc kubenswrapper[4822]: I1201 08:36:18.051762 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9851b67-4e73-4382-abf2-df7b04e28404-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b9851b67-4e73-4382-abf2-df7b04e28404" (UID: "b9851b67-4e73-4382-abf2-df7b04e28404"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:36:18 crc kubenswrapper[4822]: I1201 08:36:18.087377 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9851b67-4e73-4382-abf2-df7b04e28404-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 08:36:18 crc kubenswrapper[4822]: I1201 08:36:18.087722 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9851b67-4e73-4382-abf2-df7b04e28404-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 08:36:18 crc kubenswrapper[4822]: I1201 08:36:18.087821 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qr9fp\" (UniqueName: \"kubernetes.io/projected/b9851b67-4e73-4382-abf2-df7b04e28404-kube-api-access-qr9fp\") on node \"crc\" DevicePath \"\"" Dec 01 08:36:18 crc kubenswrapper[4822]: I1201 08:36:18.255446 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gvtvg"] Dec 01 08:36:18 crc kubenswrapper[4822]: I1201 08:36:18.261198 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gvtvg"] Dec 01 08:36:18 crc kubenswrapper[4822]: I1201 08:36:18.920843 4822 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-j4x4g" podUID="520037e0-140b-4c8e-907d-437b956c713e" containerName="registry-server" containerID="cri-o://1d42072940025d3f36392bd069509361a36851eaf4623e8792f7a1855f6e0199" gracePeriod=2 Dec 01 08:36:18 crc kubenswrapper[4822]: I1201 08:36:18.961818 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9851b67-4e73-4382-abf2-df7b04e28404" path="/var/lib/kubelet/pods/b9851b67-4e73-4382-abf2-df7b04e28404/volumes" Dec 01 08:36:20 crc kubenswrapper[4822]: I1201 08:36:20.961150 4822 generic.go:334] "Generic (PLEG): container finished" podID="520037e0-140b-4c8e-907d-437b956c713e" containerID="1d42072940025d3f36392bd069509361a36851eaf4623e8792f7a1855f6e0199" exitCode=0 Dec 01 08:36:20 crc kubenswrapper[4822]: I1201 08:36:20.962513 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j4x4g" event={"ID":"520037e0-140b-4c8e-907d-437b956c713e","Type":"ContainerDied","Data":"1d42072940025d3f36392bd069509361a36851eaf4623e8792f7a1855f6e0199"} Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.236347 4822 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.340665 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghzzg\" (UniqueName: \"kubernetes.io/projected/520037e0-140b-4c8e-907d-437b956c713e-kube-api-access-ghzzg\") pod \"520037e0-140b-4c8e-907d-437b956c713e\" (UID: \"520037e0-140b-4c8e-907d-437b956c713e\") " Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.340738 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/520037e0-140b-4c8e-907d-437b956c713e-utilities\") pod \"520037e0-140b-4c8e-907d-437b956c713e\" (UID: \"520037e0-140b-4c8e-907d-437b956c713e\") " Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.340779 4822 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/520037e0-140b-4c8e-907d-437b956c713e-catalog-content\") pod \"520037e0-140b-4c8e-907d-437b956c713e\" (UID: \"520037e0-140b-4c8e-907d-437b956c713e\") " Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.341897 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/520037e0-140b-4c8e-907d-437b956c713e-utilities" (OuterVolumeSpecName: "utilities") pod "520037e0-140b-4c8e-907d-437b956c713e" (UID: "520037e0-140b-4c8e-907d-437b956c713e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.359098 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/520037e0-140b-4c8e-907d-437b956c713e-kube-api-access-ghzzg" (OuterVolumeSpecName: "kube-api-access-ghzzg") pod "520037e0-140b-4c8e-907d-437b956c713e" (UID: "520037e0-140b-4c8e-907d-437b956c713e"). InnerVolumeSpecName "kube-api-access-ghzzg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.443401 4822 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghzzg\" (UniqueName: \"kubernetes.io/projected/520037e0-140b-4c8e-907d-437b956c713e-kube-api-access-ghzzg\") on node \"crc\" DevicePath \"\"" Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.443445 4822 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/520037e0-140b-4c8e-907d-437b956c713e-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.495274 4822 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/520037e0-140b-4c8e-907d-437b956c713e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "520037e0-140b-4c8e-907d-437b956c713e" (UID: "520037e0-140b-4c8e-907d-437b956c713e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.545102 4822 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/520037e0-140b-4c8e-907d-437b956c713e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.972412 4822 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j4x4g" event={"ID":"520037e0-140b-4c8e-907d-437b956c713e","Type":"ContainerDied","Data":"3eca5bbea2075e03c296a2628149320b02cab9deb1620925f76d52c00a9033b6"} Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.972492 4822 scope.go:117] "RemoveContainer" containerID="1d42072940025d3f36392bd069509361a36851eaf4623e8792f7a1855f6e0199" Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.972624 4822 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j4x4g" Dec 01 08:36:21 crc kubenswrapper[4822]: I1201 08:36:21.994960 4822 scope.go:117] "RemoveContainer" containerID="b123071923b5583ddd6d5b46fb58eb9a32ffa0069042f4218676925bdfac2c5c" Dec 01 08:36:22 crc kubenswrapper[4822]: I1201 08:36:22.016634 4822 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j4x4g"] Dec 01 08:36:22 crc kubenswrapper[4822]: I1201 08:36:22.022161 4822 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-j4x4g"] Dec 01 08:36:22 crc kubenswrapper[4822]: I1201 08:36:22.053641 4822 scope.go:117] "RemoveContainer" containerID="6db96932e376df47c364779ab25b062dd6b66f9092a47f1d92fdd1aa9845f56d" Dec 01 08:36:22 crc kubenswrapper[4822]: I1201 08:36:22.977983 4822 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="520037e0-140b-4c8e-907d-437b956c713e" path="/var/lib/kubelet/pods/520037e0-140b-4c8e-907d-437b956c713e/volumes" Dec 01 08:36:42 crc kubenswrapper[4822]: I1201 08:36:42.542643 4822 patch_prober.go:28] interesting pod/machine-config-daemon-2cz64 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 08:36:42 crc kubenswrapper[4822]: I1201 08:36:42.543166 4822 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2cz64" podUID="a6c6a838-3829-4058-aa59-1302d07e4507" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515113251676024454 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015113251677017372 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015113234370016504 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015113234370015454 5ustar corecore